diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver index 115a35a76..ec910f5f2 120000 --- a/node_modules/.bin/semver +++ b/node_modules/.bin/semver @@ -1 +1 @@ -../make-dir/node_modules/semver/bin/semver.js \ No newline at end of file +../@puppeteer/browsers/node_modules/semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/@actions/artifact/lib/internal/download/download-artifact.js b/node_modules/@actions/artifact/lib/internal/download/download-artifact.js index db82ab8fd..0a7535a1c 100644 --- a/node_modules/@actions/artifact/lib/internal/download/download-artifact.js +++ b/node_modules/@actions/artifact/lib/internal/download/download-artifact.js @@ -37,9 +37,6 @@ var __importDefault = (this && this.__importDefault) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); exports.downloadArtifactInternal = exports.downloadArtifactPublic = exports.streamExtractExternal = void 0; const promises_1 = __importDefault(require("fs/promises")); -const stream = __importStar(require("stream")); -const fs_1 = require("fs"); -const path = __importStar(require("path")); const github = __importStar(require("@actions/github")); const core = __importStar(require("@actions/core")); const httpClient = __importStar(require("@actions/http-client")); @@ -80,9 +77,6 @@ function streamExtract(url, directory) { return; } catch (error) { - if (error.message.includes('Malformed extraction path')) { - throw new Error(`Artifact download failed with unretryable error: ${error.message}`); - } retryCount++; core.debug(`Failed to download artifact after ${retryCount} retries due to ${error.message}. 
Retrying in 5 seconds...`); // wait 5 seconds before retrying @@ -105,8 +99,6 @@ function streamExtractExternal(url, directory) { response.message.destroy(new Error(`Blob storage chunk did not respond in ${timeout}ms`)); }; const timer = setTimeout(timerFn, timeout); - const createdDirectories = new Set(); - createdDirectories.add(directory); response.message .on('data', () => { timer.refresh(); @@ -116,47 +108,11 @@ function streamExtractExternal(url, directory) { clearTimeout(timer); reject(error); }) - .pipe(unzip_stream_1.default.Parse()) - .pipe(new stream.Transform({ - objectMode: true, - transform: (entry, _, callback) => __awaiter(this, void 0, void 0, function* () { - const fullPath = path.normalize(path.join(directory, entry.path)); - if (!directory.endsWith(path.sep)) { - directory += path.sep; - } - if (!fullPath.startsWith(directory)) { - reject(new Error(`Malformed extraction path: ${fullPath}`)); - } - if (entry.type === 'Directory') { - if (!createdDirectories.has(fullPath)) { - createdDirectories.add(fullPath); - yield resolveOrCreateDirectory(fullPath).then(() => { - entry.autodrain(); - callback(); - }); - } - else { - entry.autodrain(); - callback(); - } - } - else { - core.info(`Extracting artifact entry: ${fullPath}`); - if (!createdDirectories.has(path.dirname(fullPath))) { - createdDirectories.add(path.dirname(fullPath)); - yield resolveOrCreateDirectory(path.dirname(fullPath)); - } - const writeStream = (0, fs_1.createWriteStream)(fullPath); - writeStream.on('finish', callback); - writeStream.on('error', reject); - entry.pipe(writeStream); - } - }) - })) - .on('finish', () => __awaiter(this, void 0, void 0, function* () { + .pipe(unzip_stream_1.default.Extract({ path: directory })) + .on('close', () => { clearTimeout(timer); resolve(); - })) + }) .on('error', (error) => { reject(error); }); diff --git a/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js 
b/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js index 5ed381318..786f285d5 100644 --- a/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js +++ b/node_modules/@actions/artifact/lib/internal/shared/artifact-twirp-client.js @@ -85,7 +85,6 @@ class ArtifactHttpClient { catch (error) { if (error instanceof SyntaxError) { (0, core_1.debug)(`Raw Body: ${rawBody}`); - throw error; } if (error instanceof errors_1.UsageError) { throw error; diff --git a/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js b/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js index 3cc343c80..0505d2b29 100644 --- a/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js +++ b/node_modules/@actions/artifact/lib/internal/upload/blob-upload.js @@ -42,14 +42,34 @@ const errors_1 = require("../shared/errors"); function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) { return __awaiter(this, void 0, void 0, function* () { let uploadByteCount = 0; + let lastProgressTime = Date.now(); + let timeoutId; + const chunkTimer = (timeout) => { + // clear the previous timeout + if (timeoutId) { + clearTimeout(timeoutId); + } + timeoutId = setTimeout(() => { + const now = Date.now(); + // if there's been more than `timeout` ms since the + // last progress event, then we'll consider the upload stalled + if (now - lastProgressTime > timeout) { + throw new Error('Upload progress stalled.'); + } + }, timeout); + return timeoutId; + }; const maxConcurrency = (0, config_1.getConcurrency)(); const bufferSize = (0, config_1.getUploadChunkSize)(); const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); + const timeoutDuration = 300000; // 5 minutes core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`); const uploadCallback = (progress) => { core.info(`Uploaded bytes 
${progress.loadedBytes}`); uploadByteCount = progress.loadedBytes; + chunkTimer(timeoutDuration); + lastProgressTime = Date.now(); }; const options = { blobHTTPHeaders: { blobContentType: 'zip' }, @@ -62,6 +82,8 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) { zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check core.info('Beginning upload of artifact content to blob storage'); try { + // Start the chunk timer + timeoutId = chunkTimer(timeoutDuration); yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options); } catch (error) { @@ -70,6 +92,12 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) { } throw error; } + finally { + // clear the timeout whether or not the upload completes + if (timeoutId) { + clearTimeout(timeoutId); + } + } core.info('Finished uploading artifact content to blob storage!'); hashStream.end(); sha256Hash = hashStream.read(); diff --git a/node_modules/@actions/artifact/package.json b/node_modules/@actions/artifact/package.json index c15570813..ab84d0f23 100644 --- a/node_modules/@actions/artifact/package.json +++ b/node_modules/@actions/artifact/package.json @@ -1,6 +1,6 @@ { "name": "@actions/artifact", - "version": "2.1.4", + "version": "2.1.7", "preview": true, "description": "Actions artifact lib", "keywords": [ @@ -49,7 +49,7 @@ "@octokit/plugin-retry": "^3.0.9", "@octokit/request-error": "^5.0.0", "@protobuf-ts/plugin": "^2.2.3-alpha.1", - "archiver": "^5.3.1", + "archiver": "^7.0.1", "crypto": "^1.0.1", "jwt-decode": "^3.1.2", "twirp-ts": "^2.5.0", diff --git a/node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js b/node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js index c405227d2..9decd2a16 100644 --- a/node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js +++ 
b/node_modules/@actions/github/node_modules/@octokit/core/dist-node/index.js @@ -30,7 +30,7 @@ var import_graphql = require("@octokit/graphql"); var import_auth_token = require("@octokit/auth-token"); // pkg/dist-src/version.js -var VERSION = "5.1.0"; +var VERSION = "5.2.0"; // pkg/dist-src/index.js var noop = () => { diff --git a/node_modules/@actions/github/node_modules/@octokit/core/dist-src/version.js b/node_modules/@actions/github/node_modules/@octokit/core/dist-src/version.js index 2f3ce5807..efa20dbf7 100644 --- a/node_modules/@actions/github/node_modules/@octokit/core/dist-src/version.js +++ b/node_modules/@actions/github/node_modules/@octokit/core/dist-src/version.js @@ -1,4 +1,4 @@ -const VERSION = "5.1.0"; +const VERSION = "5.2.0"; export { VERSION }; diff --git a/node_modules/@actions/github/node_modules/@octokit/core/dist-web/index.js b/node_modules/@actions/github/node_modules/@octokit/core/dist-web/index.js index f72f36d6f..b37249a2f 100644 --- a/node_modules/@actions/github/node_modules/@octokit/core/dist-web/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/core/dist-web/index.js @@ -6,7 +6,7 @@ import { graphql, withCustomRequest } from "@octokit/graphql"; import { createTokenAuth } from "@octokit/auth-token"; // pkg/dist-src/version.js -var VERSION = "5.1.0"; +var VERSION = "5.2.0"; // pkg/dist-src/index.js var noop = () => { diff --git a/node_modules/@actions/github/node_modules/@octokit/core/node_modules/@octokit/openapi-types/package.json b/node_modules/@actions/github/node_modules/@octokit/core/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 000000000..350a5825d --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/core/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,21 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": 
"https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "version": "22.2.0", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "octokit": { + "openapi-version": "16.5.0" + } +} diff --git a/node_modules/@actions/github/node_modules/@octokit/core/node_modules/@octokit/types/package.json b/node_modules/@actions/github/node_modules/@octokit/core/node_modules/@octokit/types/package.json new file mode 100644 index 000000000..7ee12d257 --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/core/node_modules/@octokit/types/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/types", + "version": "13.5.0", + "publishConfig": { + "access": "public", + "provenance": true + }, + "description": "Shared TypeScript definitions for Octokit projects", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + }, + "repository": "github:octokit/types.ts", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^4.0.0", + "handlebars": "^4.7.6", + "json-schema-to-typescript": "^14.0.0", + "lodash.set": "^4.3.2", + "npm-run-all2": "^6.0.0", + "pascal-case": "^4.0.0", + "prettier": "^3.0.0", + "semantic-release": "^23.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^5.0.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.25.0", + "typescript": "^5.0.0" + }, + "octokit": { + "openapi-version": "16.5.0" + }, + "files": [ + "dist-types/**" + ], + "types": "dist-types/index.d.ts", + "sideEffects": false +} diff --git a/node_modules/@actions/github/node_modules/@octokit/core/package.json 
b/node_modules/@actions/github/node_modules/@octokit/core/package.json index c67063ac6..522065616 100644 --- a/node_modules/@actions/github/node_modules/@octokit/core/package.json +++ b/node_modules/@actions/github/node_modules/@octokit/core/package.json @@ -1,8 +1,9 @@ { "name": "@octokit/core", - "version": "5.1.0", + "version": "5.2.0", "publishConfig": { - "access": "public" + "access": "public", + "provenance": true }, "description": "Extendable client for GitHub's REST & GraphQL APIs", "repository": "github:octokit/core.js", @@ -17,17 +18,17 @@ "license": "MIT", "dependencies": { "@octokit/auth-token": "^4.0.0", - "@octokit/graphql": "^7.0.0", - "@octokit/request": "^8.0.2", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", + "@octokit/graphql": "^7.1.0", + "@octokit/request": "^8.3.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.0.0", "before-after-hook": "^2.2.0", "universal-user-agent": "^6.0.0" }, "devDependencies": { - "@octokit/auth-action": "^4.0.0", + "@octokit/auth-action": "^4.1.0", "@octokit/auth-app": "^6.0.0", - "@octokit/auth-oauth-app": "^7.0.0", + "@octokit/auth-oauth-app": "^7.1.0", "@octokit/tsconfig": "^2.0.0", "@sinonjs/fake-timers": "^11.2.2", "@types/fetch-mock": "^7.3.1", diff --git a/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js b/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js index 0c197b6af..3781daf05 100644 --- a/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-node/index.js @@ -28,7 +28,7 @@ module.exports = __toCommonJS(dist_src_exports); var import_universal_user_agent = require("universal-user-agent"); // pkg/dist-src/version.js -var VERSION = "9.0.4"; +var VERSION = "9.0.5"; // pkg/dist-src/defaults.js var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; diff --git 
a/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-src/version.js b/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-src/version.js index f3dacfaf1..1233f42e1 100644 --- a/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-src/version.js +++ b/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-src/version.js @@ -1,4 +1,4 @@ -const VERSION = "9.0.4"; +const VERSION = "9.0.5"; export { VERSION }; diff --git a/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-web/index.js b/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-web/index.js index 3ef99a571..cdb30984c 100644 --- a/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-web/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/endpoint/dist-web/index.js @@ -2,7 +2,7 @@ import { getUserAgent } from "universal-user-agent"; // pkg/dist-src/version.js -var VERSION = "9.0.4"; +var VERSION = "9.0.5"; // pkg/dist-src/defaults.js var userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; diff --git a/node_modules/@actions/github/node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types/package.json b/node_modules/@actions/github/node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 000000000..350a5825d --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,21 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": "https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "version": "22.2.0", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + 
"octokit": { + "openapi-version": "16.5.0" + } +} diff --git a/node_modules/@actions/github/node_modules/@octokit/endpoint/node_modules/@octokit/types/package.json b/node_modules/@actions/github/node_modules/@octokit/endpoint/node_modules/@octokit/types/package.json new file mode 100644 index 000000000..7ee12d257 --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/endpoint/node_modules/@octokit/types/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/types", + "version": "13.5.0", + "publishConfig": { + "access": "public", + "provenance": true + }, + "description": "Shared TypeScript definitions for Octokit projects", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + }, + "repository": "github:octokit/types.ts", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^4.0.0", + "handlebars": "^4.7.6", + "json-schema-to-typescript": "^14.0.0", + "lodash.set": "^4.3.2", + "npm-run-all2": "^6.0.0", + "pascal-case": "^4.0.0", + "prettier": "^3.0.0", + "semantic-release": "^23.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^5.0.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.25.0", + "typescript": "^5.0.0" + }, + "octokit": { + "openapi-version": "16.5.0" + }, + "files": [ + "dist-types/**" + ], + "types": "dist-types/index.d.ts", + "sideEffects": false +} diff --git a/node_modules/@actions/github/node_modules/@octokit/endpoint/package.json b/node_modules/@actions/github/node_modules/@octokit/endpoint/package.json index eb192e3c2..66bc3fd26 100644 --- a/node_modules/@actions/github/node_modules/@octokit/endpoint/package.json +++ b/node_modules/@actions/github/node_modules/@octokit/endpoint/package.json @@ -1,6 +1,6 @@ { "name": "@octokit/endpoint", - "version": "9.0.4", + 
"version": "9.0.5", "publishConfig": { "access": "public" }, @@ -27,7 +27,7 @@ "typescript": "^5.0.0" }, "dependencies": { - "@octokit/types": "^12.0.0", + "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" }, "engines": { diff --git a/node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js b/node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js index e5b0a4e8f..3e9cbb619 100644 --- a/node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/graphql/dist-node/index.js @@ -29,7 +29,7 @@ var import_request3 = require("@octokit/request"); var import_universal_user_agent = require("universal-user-agent"); // pkg/dist-src/version.js -var VERSION = "7.0.2"; +var VERSION = "7.1.0"; // pkg/dist-src/with-defaults.js var import_request2 = require("@octokit/request"); diff --git a/node_modules/@actions/github/node_modules/@octokit/graphql/dist-src/version.js b/node_modules/@actions/github/node_modules/@octokit/graphql/dist-src/version.js index ad9f40dea..aa83e0f27 100644 --- a/node_modules/@actions/github/node_modules/@octokit/graphql/dist-src/version.js +++ b/node_modules/@actions/github/node_modules/@octokit/graphql/dist-src/version.js @@ -1,4 +1,4 @@ -const VERSION = "7.0.2"; +const VERSION = "7.1.0"; export { VERSION }; diff --git a/node_modules/@actions/github/node_modules/@octokit/graphql/dist-web/index.js b/node_modules/@actions/github/node_modules/@octokit/graphql/dist-web/index.js index 9a07f4503..75ebee854 100644 --- a/node_modules/@actions/github/node_modules/@octokit/graphql/dist-web/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/graphql/dist-web/index.js @@ -3,7 +3,7 @@ import { request } from "@octokit/request"; import { getUserAgent } from "universal-user-agent"; // pkg/dist-src/version.js -var VERSION = "7.0.2"; +var VERSION = "7.1.0"; // pkg/dist-src/with-defaults.js import { request as Request2 } from 
"@octokit/request"; diff --git a/node_modules/@actions/github/node_modules/@octokit/graphql/node_modules/@octokit/openapi-types/package.json b/node_modules/@actions/github/node_modules/@octokit/graphql/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 000000000..350a5825d --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/graphql/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,21 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": "https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "version": "22.2.0", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "octokit": { + "openapi-version": "16.5.0" + } +} diff --git a/node_modules/@actions/github/node_modules/@octokit/graphql/node_modules/@octokit/types/package.json b/node_modules/@actions/github/node_modules/@octokit/graphql/node_modules/@octokit/types/package.json new file mode 100644 index 000000000..7ee12d257 --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/graphql/node_modules/@octokit/types/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/types", + "version": "13.5.0", + "publishConfig": { + "access": "public", + "provenance": true + }, + "description": "Shared TypeScript definitions for Octokit projects", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + }, + "repository": "github:octokit/types.ts", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^4.0.0", + "handlebars": "^4.7.6", + 
"json-schema-to-typescript": "^14.0.0", + "lodash.set": "^4.3.2", + "npm-run-all2": "^6.0.0", + "pascal-case": "^4.0.0", + "prettier": "^3.0.0", + "semantic-release": "^23.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^5.0.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.25.0", + "typescript": "^5.0.0" + }, + "octokit": { + "openapi-version": "16.5.0" + }, + "files": [ + "dist-types/**" + ], + "types": "dist-types/index.d.ts", + "sideEffects": false +} diff --git a/node_modules/@actions/github/node_modules/@octokit/graphql/package.json b/node_modules/@actions/github/node_modules/@octokit/graphql/package.json index 342c1bcfd..ca1efc676 100644 --- a/node_modules/@actions/github/node_modules/@octokit/graphql/package.json +++ b/node_modules/@actions/github/node_modules/@octokit/graphql/package.json @@ -1,8 +1,9 @@ { "name": "@octokit/graphql", - "version": "7.0.2", + "version": "7.1.0", "publishConfig": { - "access": "public" + "access": "public", + "provenance": true }, "description": "GitHub GraphQL API client for browsers and Node", "repository": "github:octokit/graphql.js", @@ -15,8 +16,8 @@ "author": "Gregor Martynus (https://github.com/gr2m)", "license": "MIT", "dependencies": { - "@octokit/request": "^8.0.1", - "@octokit/types": "^12.0.0", + "@octokit/request": "^8.3.0", + "@octokit/types": "^13.0.0", "universal-user-agent": "^6.0.0" }, "devDependencies": { diff --git a/node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js b/node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js index 144c8797c..8eac63e60 100644 --- a/node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/request/dist-node/index.js @@ -27,7 +27,7 @@ var import_endpoint = require("@octokit/endpoint"); var import_universal_user_agent = require("universal-user-agent"); // pkg/dist-src/version.js -var VERSION = "8.2.0"; +var 
VERSION = "8.4.0"; // pkg/dist-src/is-plain-object.js function isPlainObject(value) { @@ -52,7 +52,7 @@ function getBufferResponse(response) { // pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { - var _a, _b, _c; + var _a, _b, _c, _d; const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { @@ -73,8 +73,9 @@ function fetchWrapper(requestOptions) { return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, + redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, headers: requestOptions.headers, - signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, + signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, // duplex must be set if request.body is ReadableStream or Async Iterables. // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. ...requestOptions.body && { duplex: "half" } diff --git a/node_modules/@actions/github/node_modules/@octokit/request/dist-src/fetch-wrapper.js b/node_modules/@actions/github/node_modules/@octokit/request/dist-src/fetch-wrapper.js index 6665716f9..c6374d455 100644 --- a/node_modules/@actions/github/node_modules/@octokit/request/dist-src/fetch-wrapper.js +++ b/node_modules/@actions/github/node_modules/@octokit/request/dist-src/fetch-wrapper.js @@ -22,6 +22,7 @@ function fetchWrapper(requestOptions) { return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, + redirect: requestOptions.request?.redirect, headers: requestOptions.headers, signal: requestOptions.request?.signal, // duplex must be set if request.body is ReadableStream or Async Iterables. 
diff --git a/node_modules/@actions/github/node_modules/@octokit/request/dist-src/version.js b/node_modules/@actions/github/node_modules/@octokit/request/dist-src/version.js index bba0caf95..b761df444 100644 --- a/node_modules/@actions/github/node_modules/@octokit/request/dist-src/version.js +++ b/node_modules/@actions/github/node_modules/@octokit/request/dist-src/version.js @@ -1,4 +1,4 @@ -const VERSION = "8.2.0"; +const VERSION = "8.4.0"; export { VERSION }; diff --git a/node_modules/@actions/github/node_modules/@octokit/request/dist-web/index.js b/node_modules/@actions/github/node_modules/@octokit/request/dist-web/index.js index 7a04c65d7..198a29b38 100644 --- a/node_modules/@actions/github/node_modules/@octokit/request/dist-web/index.js +++ b/node_modules/@actions/github/node_modules/@octokit/request/dist-web/index.js @@ -3,7 +3,7 @@ import { endpoint } from "@octokit/endpoint"; import { getUserAgent } from "universal-user-agent"; // pkg/dist-src/version.js -var VERSION = "8.2.0"; +var VERSION = "8.4.0"; // pkg/dist-src/is-plain-object.js function isPlainObject(value) { @@ -48,6 +48,7 @@ function fetchWrapper(requestOptions) { return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, + redirect: requestOptions.request?.redirect, headers: requestOptions.headers, signal: requestOptions.request?.signal, // duplex must be set if request.body is ReadableStream or Async Iterables. 
diff --git a/node_modules/@actions/github/node_modules/@octokit/request/node_modules/@octokit/openapi-types/package.json b/node_modules/@actions/github/node_modules/@octokit/request/node_modules/@octokit/openapi-types/package.json new file mode 100644 index 000000000..350a5825d --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/request/node_modules/@octokit/openapi-types/package.json @@ -0,0 +1,21 @@ +{ + "name": "@octokit/openapi-types", + "description": "Generated TypeScript definitions based on GitHub's OpenAPI spec for api.github.com", + "repository": { + "type": "git", + "url": "https://github.com/octokit/openapi-types.ts.git", + "directory": "packages/openapi-types" + }, + "publishConfig": { + "access": "public", + "provenance": true + }, + "version": "22.2.0", + "main": "", + "types": "types.d.ts", + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "octokit": { + "openapi-version": "16.5.0" + } +} diff --git a/node_modules/@actions/github/node_modules/@octokit/request/node_modules/@octokit/types/package.json b/node_modules/@actions/github/node_modules/@octokit/request/node_modules/@octokit/types/package.json new file mode 100644 index 000000000..7ee12d257 --- /dev/null +++ b/node_modules/@actions/github/node_modules/@octokit/request/node_modules/@octokit/types/package.json @@ -0,0 +1,47 @@ +{ + "name": "@octokit/types", + "version": "13.5.0", + "publishConfig": { + "access": "public", + "provenance": true + }, + "description": "Shared TypeScript definitions for Octokit projects", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + }, + "repository": "github:octokit/types.ts", + "keywords": [ + "github", + "api", + "sdk", + "toolkit", + "typescript" + ], + "author": "Gregor Martynus (https://twitter.com/gr2m)", + "license": "MIT", + "devDependencies": { + "@octokit/tsconfig": "^2.0.0", + "@types/node": ">= 8", + "github-openapi-graphql-query": "^4.0.0", + "handlebars": "^4.7.6", + 
"json-schema-to-typescript": "^14.0.0", + "lodash.set": "^4.3.2", + "npm-run-all2": "^6.0.0", + "pascal-case": "^4.0.0", + "prettier": "^3.0.0", + "semantic-release": "^23.0.0", + "semantic-release-plugin-update-version-in-files": "^1.0.0", + "sort-keys": "^5.0.0", + "string-to-jsdoc-comment": "^1.0.0", + "typedoc": "^0.25.0", + "typescript": "^5.0.0" + }, + "octokit": { + "openapi-version": "16.5.0" + }, + "files": [ + "dist-types/**" + ], + "types": "dist-types/index.d.ts", + "sideEffects": false +} diff --git a/node_modules/@actions/github/node_modules/@octokit/request/package.json b/node_modules/@actions/github/node_modules/@octokit/request/package.json index 66ce1a58e..143071761 100644 --- a/node_modules/@actions/github/node_modules/@octokit/request/package.json +++ b/node_modules/@actions/github/node_modules/@octokit/request/package.json @@ -1,8 +1,9 @@ { "name": "@octokit/request", - "version": "8.2.0", + "version": "8.4.0", "publishConfig": { - "access": "public" + "access": "public", + "provenance": true }, "description": "Send parameterized requests to GitHub's APIs with sensible defaults in browsers and Node", "repository": "github:octokit/request.js", @@ -15,9 +16,9 @@ "author": "Gregor Martynus (https://github.com/gr2m)", "license": "MIT", "dependencies": { - "@octokit/endpoint": "^9.0.0", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", + "@octokit/endpoint": "^9.0.1", + "@octokit/request-error": "^5.1.0", + "@octokit/types": "^13.1.0", "universal-user-agent": "^6.0.0" }, "devDependencies": { diff --git a/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json index 22735db17..6305f1798 100644 --- a/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json +++ b/node_modules/@azure/abort-controller/dist/commonjs/tsdoc-metadata.json @@ -5,7 +5,7 @@ "toolPackages": [ { "packageName": "@microsoft/api-extractor", - "packageVersion": 
"7.42.3" + "packageVersion": "7.43.1" } ] } diff --git a/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/abort-controller/package.json index d8375e5ea..634e3a307 100644 --- a/node_modules/@azure/abort-controller/package.json +++ b/node_modules/@azure/abort-controller/package.json @@ -1,7 +1,7 @@ { "name": "@azure/abort-controller", "sdk-type": "client", - "version": "2.1.1", + "version": "2.1.2", "description": "Microsoft Azure SDK for JavaScript - Aborter", "author": "Microsoft Corporation", "license": "MIT", @@ -91,7 +91,7 @@ "playwright": "^1.41.2", "prettier": "^3.2.5", "rimraf": "^5.0.5", - "tshy": "^1.11.1", + "tshy": "^1.13.0", "typescript": "~5.3.3", "vitest": "^1.3.1" }, diff --git a/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js index 050f0fdf4..e1ec1af9f 100644 --- a/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/browser/azureKeyCredential.js @@ -5,7 +5,6 @@ * the underlying key value. */ export class AzureKeyCredential { - _key; /** * The value of the key to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js index 76088504b..3cd59bfa7 100644 --- a/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/browser/azureNamedKeyCredential.js @@ -6,8 +6,6 @@ import { isObjectWithProperties } from "@azure/core-util"; * the underlying name and key values. */ export class AzureNamedKeyCredential { - _key; - _name; /** * The value of the key to be used in authentication. 
*/ diff --git a/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js index a7da3b3d3..d9d6e0b6b 100644 --- a/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js +++ b/node_modules/@azure/core-auth/dist/browser/azureSASCredential.js @@ -6,7 +6,6 @@ import { isObjectWithProperties } from "@azure/core-util"; * the underlying signature value. */ export class AzureSASCredential { - _signature; /** * The value of the shared access signature to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js index ea21c1d69..7c57fc159 100644 --- a/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/commonjs/azureKeyCredential.js @@ -8,7 +8,6 @@ exports.AzureKeyCredential = void 0; * the underlying key value. */ class AzureKeyCredential { - _key; /** * The value of the key to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js index e0b73fa05..8fa14f861 100644 --- a/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/commonjs/azureNamedKeyCredential.js @@ -9,8 +9,6 @@ const core_util_1 = require("@azure/core-util"); * the underlying name and key values. */ class AzureNamedKeyCredential { - _key; - _name; /** * The value of the key to be used in authentication. 
*/ diff --git a/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js index 1c29897cf..c59fab7c7 100644 --- a/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js +++ b/node_modules/@azure/core-auth/dist/commonjs/azureSASCredential.js @@ -9,7 +9,6 @@ const core_util_1 = require("@azure/core-util"); * the underlying signature value. */ class AzureSASCredential { - _signature; /** * The value of the shared access signature to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json index 22735db17..6305f1798 100644 --- a/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json +++ b/node_modules/@azure/core-auth/dist/commonjs/tsdoc-metadata.json @@ -5,7 +5,7 @@ "toolPackages": [ { "packageName": "@microsoft/api-extractor", - "packageVersion": "7.42.3" + "packageVersion": "7.43.1" } ] } diff --git a/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js index 050f0fdf4..e1ec1af9f 100644 --- a/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/esm/azureKeyCredential.js @@ -5,7 +5,6 @@ * the underlying key value. */ export class AzureKeyCredential { - _key; /** * The value of the key to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js index 76088504b..3cd59bfa7 100644 --- a/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/esm/azureNamedKeyCredential.js @@ -6,8 +6,6 @@ import { isObjectWithProperties } from "@azure/core-util"; * the underlying name and key values. 
*/ export class AzureNamedKeyCredential { - _key; - _name; /** * The value of the key to be used in authentication. */ diff --git a/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js index a7da3b3d3..d9d6e0b6b 100644 --- a/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js +++ b/node_modules/@azure/core-auth/dist/esm/azureSASCredential.js @@ -6,7 +6,6 @@ import { isObjectWithProperties } from "@azure/core-util"; * the underlying signature value. */ export class AzureSASCredential { - _signature; /** * The value of the shared access signature to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js index 050f0fdf4..e1ec1af9f 100644 --- a/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/react-native/azureKeyCredential.js @@ -5,7 +5,6 @@ * the underlying key value. */ export class AzureKeyCredential { - _key; /** * The value of the key to be used in authentication */ diff --git a/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js index 76088504b..3cd59bfa7 100644 --- a/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js +++ b/node_modules/@azure/core-auth/dist/react-native/azureNamedKeyCredential.js @@ -6,8 +6,6 @@ import { isObjectWithProperties } from "@azure/core-util"; * the underlying name and key values. */ export class AzureNamedKeyCredential { - _key; - _name; /** * The value of the key to be used in authentication. 
*/ diff --git a/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js index a7da3b3d3..d9d6e0b6b 100644 --- a/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js +++ b/node_modules/@azure/core-auth/dist/react-native/azureSASCredential.js @@ -6,7 +6,6 @@ import { isObjectWithProperties } from "@azure/core-util"; * the underlying signature value. */ export class AzureSASCredential { - _signature; /** * The value of the shared access signature to be used in authentication */ diff --git a/node_modules/@azure/core-auth/package.json b/node_modules/@azure/core-auth/package.json index faf8695b3..421b61996 100644 --- a/node_modules/@azure/core-auth/package.json +++ b/node_modules/@azure/core-auth/package.json @@ -1,6 +1,6 @@ { "name": "@azure/core-auth", - "version": "1.7.1", + "version": "1.7.2", "description": "Provides low-level interfaces and helper methods for authentication in Azure SDK", "sdk-type": "client", "type": "module", @@ -87,7 +87,7 @@ "playwright": "^1.41.2", "prettier": "^3.2.5", "rimraf": "^5.0.5", - "tshy": "^1.11.1", + "tshy": "^1.13.0", "typescript": "~5.3.3", "vitest": "^1.3.1" }, diff --git a/node_modules/@azure/core-client/dist/browser/authorizeRequestOnClaimChallenge.js b/node_modules/@azure/core-client/dist/browser/authorizeRequestOnClaimChallenge.js new file mode 100644 index 000000000..9de3e8b83 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/authorizeRequestOnClaimChallenge.js @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger as coreClientLogger } from "./log.js"; +import { decodeStringToString } from "./base64.js"; +/** + * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. + * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. 
+ * + * @internal + */ +export function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); + }); +} +/** + * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: + * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). + * + * Call the `bearerTokenAuthenticationPolicy` with the following options: + * + * ```ts + * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; + * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; + * + * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ + * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge + * }); + * ``` + * + * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. + * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. 
+ * + * Example challenge with claims: + * + * ``` + * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", + * error_description="User session has been revoked", + * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" + * ``` + */ +export async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || coreClientLogger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: decodeStringToString(parsedChallenge.claims), + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + return true; +} +//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/authorizeRequestOnTenantChallenge.js b/node_modules/@azure/core-client/dist/browser/authorizeRequestOnTenantChallenge.js new file mode 100644 index 000000000..0943fcede --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/authorizeRequestOnTenantChallenge.js @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. 
+ */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); +} +/** + * Defines a callback to handle auth challenge for Storage APIs. + * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge + * Handling has specific features for storage that departs to the general AAD challenge docs. + **/ +export const authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); + return true; + } + return false; +}; +/** + * Extracts the tenant id from the challenge information + * The tenant id is contained in the authorization_uri as the first + * path part. 
+ */ +function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return undefined; +} +/** + * Builds the authentication scopes based on the information that comes in the + * challenge information. Scopes url is present in the resource_id, if it is empty + * we keep using the original scopes. + */ +function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + // the extra slash is required by the service + scope = "https://disk.azure.com//.default"; + } + return [scope]; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. 
+ * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +/** + * Extracts the options form a Pipeline Request for later re-use + */ +function requestToOptions(request) { + return { + abortSignal: request.abortSignal, + requestOptions: { + timeout: request.timeout, + }, + tracingOptions: request.tracingOptions, + }; +} +//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/base64.js b/node_modules/@azure/core-client/dist/browser/base64.js new file mode 100644 index 000000000..cf1a770c5 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/base64.js @@ -0,0 +1,44 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - the string to encode + * @internal + */ +export function encodeString(value) { + return btoa(value); +} +/** + * Encodes a byte array in base64 format. + * @param value - the Uint8Aray to encode + * @internal + */ +export function encodeByteArray(value) { + let str = ""; + for (let i = 0; i < value.length; i++) { + str += String.fromCharCode(value[i]); + } + return btoa(str); +} +/** + * Decodes a base64 string into a byte array. + * @param value - the base64 string to decode + * @internal + */ +export function decodeString(value) { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; +} +/** + * Decodes a base64 string into a string. 
+ * @param value - the base64 string to decode + * @internal + */ +export function decodeStringToString(value) { + return atob(value); +} +//# sourceMappingURL=base64-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/deserializationPolicy.js b/node_modules/@azure/core-client/dist/browser/deserializationPolicy.js new file mode 100644 index 000000000..bdda3cd33 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/deserializationPolicy.js @@ -0,0 +1,231 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_CHARKEY, } from "./interfaces.js"; +import { RestError, } from "@azure/core-rest-pipeline"; +import { MapperTypeNames } from "./serializer.js"; +import { getOperationRequestInfo } from "./operationHelpers.js"; +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * The programmatic identifier of the deserializationPolicy. + */ +export const deserializationPolicyName = "deserializationPolicy"; +/** + * This policy handles parsing out responses according to OperationSpecs on the request. + */ +export function deserializationPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes; + const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? 
_e : "", + includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, + xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? _g : XML_CHARKEY, + }, + }; + return { + name: deserializationPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + }, + }; +} +function getOperationResponseMap(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationInfo = getOperationRequestInfo(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (operationSpec) { + if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const request = parsedResponse.request; + const operationInfo = getOperationRequestInfo(request); + const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = getOperationRequestInfo(parsedResponse.request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperTypeNames.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (deserializeError) { + const restError = new RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); + } + } + return parsedResponse; +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? 
responseSpec : operationSpec.responses.default; + const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); + } + const internalError = parsedBody.error || deserializedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = deserializedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = + 
operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + var _a; + if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && + operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + try { + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } + } + catch (err) { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse, + }); + throw e; + } + } + return operationResponse; +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/httpClientCache.js 
b/node_modules/@azure/core-client/dist/browser/httpClientCache.js new file mode 100644 index 000000000..ad48a498c --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/httpClientCache.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createDefaultHttpClient } from "@azure/core-rest-pipeline"; +let cachedHttpClient; +export function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = createDefaultHttpClient(); + } + return cachedHttpClient; +} +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/index.js b/node_modules/@azure/core-client/dist/browser/index.js new file mode 100644 index 000000000..3f91b1c44 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/index.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { createSerializer, MapperTypeNames } from "./serializer.js"; +export { ServiceClient } from "./serviceClient.js"; +export { createClientPipeline } from "./pipeline.js"; +export { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +export { deserializationPolicy, deserializationPolicyName, } from "./deserializationPolicy.js"; +export { serializationPolicy, serializationPolicyName, } from "./serializationPolicy.js"; +export { authorizeRequestOnClaimChallenge } from "./authorizeRequestOnClaimChallenge.js"; +export { authorizeRequestOnTenantChallenge } from "./authorizeRequestOnTenantChallenge.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/interfaceHelpers.js b/node_modules/@azure/core-client/dist/browser/interfaceHelpers.js new file mode 100644 index 000000000..80b8905e6 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/interfaceHelpers.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { MapperTypeNames } from "./serializer.js"; +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export function getStreamingResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + * @internal + */ +export function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=interfaceHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/interfaces.js b/node_modules/@azure/core-client/dist/browser/interfaces.js new file mode 100644 index 000000000..6a3bc345f --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/interfaces.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/log.js b/node_modules/@azure/core-client/dist/browser/log.js new file mode 100644 index 000000000..9caaedac8 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-client"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/operationHelpers.js b/node_modules/@azure/core-client/dist/browser/operationHelpers.js new file mode 100644 index 000000000..942ba360f --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/operationHelpers.js @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { state } from "./state.js"; +/** + * @internal + * Retrieves the value to use for a given operation argument + * @param operationArguments - The arguments passed from the generated client + * @param parameter - The parameter description + * @param fallbackObject - If something isn't found in the arguments bag, look here. + * Generally used to look at the service client properties. 
+ */ +export function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper, + }, fallbackObject); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +const originalRequestSymbol = Symbol.for("@azure/core-client original request"); +function hasOriginalRequest(request) { + return originalRequestSymbol in request; +} +export function getOperationRequestInfo(request) { + if (hasOriginalRequest(request)) { + return getOperationRequestInfo(request[originalRequestSymbol]); + } + let info = state.operationRequestMap.get(request); + if (!info) { + info = {}; + state.operationRequestMap.set(request, info); + } + return info; +} +//# sourceMappingURL=operationHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/package.json b/node_modules/@azure/core-client/dist/browser/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-client/dist/browser/pipeline.js b/node_modules/@azure/core-client/dist/browser/pipeline.js new file mode 100644 index 000000000..b320ce619 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/pipeline.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { deserializationPolicy } from "./deserializationPolicy.js"; +import { bearerTokenAuthenticationPolicy, createPipelineFromOptions, } from "@azure/core-rest-pipeline"; +import { serializationPolicy } from "./serializationPolicy.js"; +/** + * Creates a new Pipeline for use with a Service Client. + * Adds in deserializationPolicy by default. + * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. + * @param options - Options to customize the created pipeline. 
+ */ +export function createClientPipeline(options = {}) { + const pipeline = createPipelineFromOptions(options !== null && options !== void 0 ? options : {}); + if (options.credentialOptions) { + pipeline.addPolicy(bearerTokenAuthenticationPolicy({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes, + })); + } + pipeline.addPolicy(serializationPolicy(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy(deserializationPolicy(options.deserializationOptions), { + phase: "Deserialize", + }); + return pipeline; +} +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/serializationPolicy.js b/node_modules/@azure/core-client/dist/browser/serializationPolicy.js new file mode 100644 index 000000000..a75aceba7 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/serializationPolicy.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +import { getOperationArgumentValueFromParameter, getOperationRequestInfo, } from "./operationHelpers.js"; +import { MapperTypeNames } from "./serializer.js"; +import { getPathStringFromParameter } from "./interfaceHelpers.js"; +/** + * The programmatic identifier of the serializationPolicy. + */ +export const serializationPolicyName = "serializationPolicy"; +/** + * This policy handles assembling the request body and headers using + * an OperationSpec and OperationArguments on the request. + */ +export function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: serializationPolicyName, + async sendRequest(request, next) { + const operationInfo = getOperationRequestInfo(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.operationSpec; + const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request, operationArguments, operationSpec); + serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); + } + return next(request); + }, + }; +} +/** + * @internal + */ +export function serializeHeaders(request, operationArguments, operationSpec) { + var _a, _b; + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(operationArguments, headerParameter); + if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + request.headers.set(headerParameter.mapper.serializedName || getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request.headers.set(customHeaderName, customHeaders[customHeaderName]); + } + } +} +/** + * @internal + */ +export function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { + throw new Error("XML serialization unsupported!"); +}) { + var _a, _b, _c, _d, _e; + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", + includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, + xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? _d : XML_CHARKEY, + }, + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request.body = getOperationArgumentValueFromParameter(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((request.body !== undefined && request.body !== null) || + (nullable && request.body === null) || + required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); + if (typeName === MapperTypeNames.Sequence) { + request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } + else if (!isStream) { + request.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperTypeNames.String && + (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + request.body = JSON.stringify(request.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(operationArguments, formDataParameter); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) 
{ + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +//# sourceMappingURL=serializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/serializer.js b/node_modules/@azure/core-client/dist/browser/serializer.js new file mode 100644 index 000000000..9dc240521 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/serializer.js @@ -0,0 +1,922 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as base64 from "./base64.js"; +import { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +import { isDuration, isValidUuid } from "./utils.js"; +class SerializerImpl { + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== undefined && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + var _a, _b, _c; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === undefined || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === undefined || object === null) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + 
/** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param responseBody - A valid Javascript entity to be deserialized + * + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object + */ + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + var _a, _b, _c, _d; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }, + ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, + }; + if (responseBody === undefined || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (responseBody[XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = 
deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +/** + * Method that creates and returns a Serializer. + * @param modelMappers - Known models to map + * @param isXML - If XML should be supported + */ +export function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob === "function" || 
typeof Blob === "object") && value instanceof Blob) && + objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value !== undefined && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a; + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. 
+ if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== undefined && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === undefined || childObject === null) && + ((object[key] !== undefined && object[key] !== null) || + propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; 
+ } + } + if (parentObject !== undefined && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? `xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + (toSerialize === undefined || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName !== undefined && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... 
+ + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + // only accept null when reaching the last position of object otherwise it would be undefined + if (res === null && steps < paths.length) { + res = undefined; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + (propertyInstance === undefined || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function 
deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + /* jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + var _a; + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (element.type.name === "Composite" && element.type.className) { + element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? _a : element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName + ? 
discriminatorValue + : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } + else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && + mapper.type.uberParent === currentName && + mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } + } + } + } + return undefined; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var _a; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + // The serializedName might have \\, which we just want to ignore + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? 
_a : mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Known types of Mappers + */ +export const MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime", +}; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/serviceClient.js b/node_modules/@azure/core-client/dist/browser/serviceClient.js new file mode 100644 index 000000000..eccd0d8cf --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/serviceClient.js @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createPipelineRequest, } from "@azure/core-rest-pipeline"; +import { createClientPipeline } from "./pipeline.js"; +import { flattenResponse } from "./utils.js"; +import { getCachedDefaultHttpClient } from "./httpClientCache.js"; +import { getOperationRequestInfo } from "./operationHelpers.js"; +import { getRequestUrl } from "./urlHelpers.js"; +import { getStreamingResponseStatusCodes } from "./interfaceHelpers.js"; +import { logger } from "./log.js"; +/** + * Initializes a new instance of the ServiceClient. + */ +export class ServiceClient { + /** + * The ServiceClient constructor + * @param credential - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + var _a, _b; + this._requestContentType = options.requestContentType; + this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; + if (options.baseUri) { + logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { + for (const { policy, position } of options.additionalPolicies) { + // Sign happens after Retry and is commonly needed to occur + // before policies that intercept post-retry. + const afterPhase = position === "perRetry" ? "Sign" : undefined; + this.pipeline.addPolicy(policy, { + afterPhase, + }); + } + } + } + /** + * Send the provided httpRequest. + */ + async sendRequest(request) { + return this.pipeline.sendRequest(this._httpClient, request); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. 
+ * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint = operationSpec.baseUrl || this._endpoint; + if (!endpoint) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); + } + // Templatized URLs sometimes reference properties on the ServiceClient child class, + // so we have to pass `this` below in order to search these properties if they're + // not part of OperationArguments + const url = getRequestUrl(endpoint, operationSpec, operationArguments, this); + const request = createPipelineRequest({ + url, + }); + request.method = operationSpec.httpMethod; + const operationInfo = getOperationRequestInfo(request); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request.onDownloadProgress = requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== undefined) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request.allowInsecureConnection = true; + } + } + if 
(options.abortSignal) { + request.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request.allowInsecureConnection = true; + } + if (request.streamResponseStatusCodes === undefined) { + request.streamResponseStatusCodes = getStreamingResponseStatusCodes(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request); + const flatResponse = flattenResponse(rawResponse, operationSpec.responses[rawResponse.status]); + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } + catch (error) { + if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { + const rawResponse = error.response; + const flatResponse = flattenResponse(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); + error.details = flatResponse; + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse, error); + } + } + throw error; + } + } +} +function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes + ? { credentialScopes, credential: options.credential } + : undefined; + return createClientPipeline(Object.assign(Object.assign({}, options), { credentialOptions })); +} +function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; + } + if (options.endpoint) { + return `${options.endpoint}/.default`; + } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. 
Unable to create a bearerTokenAuthenticationPolicy`); + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/state.js b/node_modules/@azure/core-client/dist/browser/state.js new file mode 100644 index 000000000..18b0f80c0 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/state.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Browser-only implementation of the module's state. The browser esm variant will not load the commonjs state, so we do not need to share state between the two. + */ +export const state = { + operationRequestMap: new WeakMap(), +}; +//# sourceMappingURL=state-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/urlHelpers.js b/node_modules/@azure/core-client/dist/browser/urlHelpers.js new file mode 100644 index 000000000..782f4ef71 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/urlHelpers.js @@ -0,0 +1,235 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getOperationArgumentValueFromParameter } from "./operationHelpers.js"; +import { getPathStringFromParameter } from "./interfaceHelpers.js"; +const CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: "\t", + Pipes: "|", +}; +export function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path = replaceAll(operationSpec.path, urlReplacements); + // QUIRK: sometimes we get a path component like /{nextLink} + // which may be a fully formed URL with a leading /. 
In that case, we should + // remove the leading / + if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { + path = path.substring(1); + } + // QUIRK: sometimes we get a path component like {nextLink} + // which may be a fully formed URL. In that case, we should + // ignore the baseUri. + if (isAbsoluteUrl(path)) { + requestUrl = path; + isAbsolutePath = true; + } + else { + requestUrl = appendPath(requestUrl, path); + } + } + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + /** + * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` + * is an absolute path. This ensures that existing query parameter values in `requestUrl` + * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it + * is still being built so there is nothing to overwrite. + */ + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; +} +function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); + } + return result; +} +function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? 
void 0 : _a.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(operationArguments, urlParameter, fallbackObject); + const parameterPathString = getPathStringFromParameter(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); + } + } + return result; +} +function isAbsoluteUrl(url) { + return url.includes("://"); +} +function appendPath(url, pathToAppend) { + if (!pathToAppend) { + return url; + } + const parsedUrl = new URL(url); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; + } + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); + } + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path; + if (search) { + parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; + } + } + else { + newPath = newPath + pathToAppend; + } + parsedUrl.pathname = newPath; + return parsedUrl.toString(); +} +function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + const sequenceParams = new Set(); + if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? 
void 0 : _a.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); + } + let queryParameterValue = getOperationArgumentValueFromParameter(operationArguments, queryParameter, fallbackObject); + if ((queryParameterValue !== undefined && queryParameterValue !== null) || + queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + const delimiter = queryParameter.collectionFormat + ? CollectionFormatToDelimiterMap[queryParameter.collectionFormat] + : ""; + if (Array.isArray(queryParameterValue)) { + // replace null and undefined + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === undefined) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } + else if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + // Join pipes and CSV *after* encoding, or the server will be upset. 
+ if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + return { + queryParams: result, + sequenceParams, + }; +} +function simpleParseQueryParams(queryString) { + const result = new Map(); + if (!queryString || queryString[0] !== "?") { + return result; + } + // remove the leading ? + queryString = queryString.slice(1); + const pairs = queryString.split("&"); + for (const pair of pairs) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } + else { + result.set(name, [existingValue, value]); + } + } + else { + result.set(name, value); + } + } + return result; +} +/** @internal */ +export function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url; + } + const parsedUrl = new URL(url); + // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which + // can change their meaning to the server, such as in the case of a SAS signature. 
+ // To avoid accidentally un-encoding a query param, we parse the key/values ourselves + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } + else { + existingValue.push(value); + } + } + else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } + else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } + } + else { + combinedParams.set(name, value); + } + } + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } + else if (Array.isArray(value)) { + // QUIRK: If we get an array of values, include multiple key/value pairs + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } + else { + searchPieces.push(`${name}=${value}`); + } + } + // QUIRK: we have to set search manually as searchParams will encode comma when it shouldn't. + parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); +} +//# sourceMappingURL=urlHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/browser/utils.js b/node_modules/@azure/core-client/dist/browser/utils.js new file mode 100644 index 000000000..7a6268631 --- /dev/null +++ b/node_modules/@azure/core-client/dist/browser/utils.js @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A type guard for a primitive response body. 
+ * @param value - Value to test + * + * @internal + */ +export function isPrimitiveBody(value, mapperTypeName) { + return (mapperTypeName !== "Composite" && + mapperTypeName !== "Dictionary" && + (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== + null || + value === undefined || + value === null)); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Returns true if the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @internal + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Returns true if the provided uuid is valid. + * + * @param uuid - The uuid that needs to be validated. + * + * @internal + */ +export function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Maps the response as follows: + * - wraps the response body if needed (typically if its type is primitive). + * - returns null if the combination of the headers and the body is empty. + * - otherwise, returns the combination of the headers and the body. 
+ * + * @param responseObject - a representation of the parsed response + * @returns the response that will be returned to the user which can be null and/or wrapped + * + * @internal + */ +function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); + if (responseObject.hasNullableType && + Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; + } + else { + return responseObject.shouldWrapBody + ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; + } +} +/** + * Take a `FullOperationResponse` and turn it into a flat + * response object to hand back to the consumer. + * @param fullResponse - The processed response from the operation request + * @param responseSpec - The response map from the OperationSpec + * + * @internal + */ +export function flattenResponse(fullResponse, responseSpec) { + var _a, _b; + const parsedHeaders = fullResponse.parsedHeaders; + // head methods never have a body, but we return a boolean set to body property + // to indicate presence/absence of the resource + if (fullResponse.request.method === "HEAD") { + return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); + } + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable); + const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? 
void 0 : bodyMapper.type.name; + /** If the body is asked for, we look at the expected body type to handle it */ + if (expectedBodyTypeName === "Stream") { + return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); + } + const modelProperties = (expectedBodyTypeName === "Composite" && + bodyMapper.type.modelProperties) || + {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + return isNullable && + !fullResponse.parsedBody && + !parsedHeaders && + Object.getOwnPropertyNames(modelProperties).length === 0 + ? null + : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), + }); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js b/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js new file mode 100644 index 000000000..fe592ab03 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnClaimChallenge.js @@ -0,0 +1,74 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.authorizeRequestOnClaimChallenge = exports.parseCAEChallenge = void 0; +const log_js_1 = require("./log.js"); +const base64_js_1 = require("./base64.js"); +/** + * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. + * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. + * + * @internal + */ +function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); + }); +} +exports.parseCAEChallenge = parseCAEChallenge; +/** + * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: + * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). + * + * Call the `bearerTokenAuthenticationPolicy` with the following options: + * + * ```ts + * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; + * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; + * + * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ + * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge + * }); + * ``` + * + * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. + * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. 
+ * + * Example challenge with claims: + * + * ``` + * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", + * error_description="User session has been revoked", + * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" + * ``` + */ +async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || log_js_1.logger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims), + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + return true; +} +exports.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; +//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js b/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js new file mode 100644 index 000000000..c016f17d7 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/authorizeRequestOnTenantChallenge.js @@ -0,0 +1,117 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.authorizeRequestOnTenantChallenge = void 0; +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); +} +/** + * Defines a callback to handle auth challenge for Storage APIs. + * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge + * Handling has specific features for storage that departs to the general AAD challenge docs. + **/ +const authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); + return true; + } + return false; +}; +exports.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; +/** + * Extracts the tenant id from the challenge information + * The tenant id is contained in the authorization_uri as the first + * path part. 
+ */ +function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return undefined; +} +/** + * Builds the authentication scopes based on the information that comes in the + * challenge information. Scopes url is present in the resource_id, if it is empty + * we keep using the original scopes. + */ +function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + // the extra slash is required by the service + scope = "https://disk.azure.com//.default"; + } + return [scope]; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. 
 + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +/** + * Extracts the options from a Pipeline Request for later re-use + */ +function requestToOptions(request) { + return { + abortSignal: request.abortSignal, + requestOptions: { + timeout: request.timeout, + }, + tracingOptions: request.tracingOptions, + }; +} +//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/base64.js b/node_modules/@azure/core-client/dist/commonjs/base64.js new file mode 100644 index 000000000..21ec86543 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/base64.js @@ -0,0 +1,43 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.decodeStringToString = exports.decodeString = exports.encodeByteArray = exports.encodeString = void 0; +/** + * Encodes a string in base64 format. + * @param value - the string to encode + * @internal + */ +function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +exports.encodeString = encodeString; +/** + * Encodes a byte array in base64 format. + * @param value - the Uint8Array to encode + * @internal + */ +function encodeByteArray(value) { + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +exports.encodeByteArray = encodeByteArray; +/** + * Decodes a base64 string into a byte array. 
+ * @param value - the base64 string to decode + * @internal + */ +function decodeString(value) { + return Buffer.from(value, "base64"); +} +exports.decodeString = decodeString; +/** + * Decodes a base64 string into a string. + * @param value - the base64 string to decode + * @internal + */ +function decodeStringToString(value) { + return Buffer.from(value, "base64").toString(); +} +exports.decodeStringToString = decodeStringToString; +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js b/node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js new file mode 100644 index 000000000..431511bae --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/deserializationPolicy.js @@ -0,0 +1,235 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.deserializationPolicy = exports.deserializationPolicyName = void 0; +const interfaces_js_1 = require("./interfaces.js"); +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +const serializer_js_1 = require("./serializer.js"); +const operationHelpers_js_1 = require("./operationHelpers.js"); +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * The programmatic identifier of the deserializationPolicy. + */ +exports.deserializationPolicyName = "deserializationPolicy"; +/** + * This policy handles parsing out responses according to OperationSpecs on the request. + */ +function deserializationPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? 
_b : defaultJsonContentTypes; + const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? _e : "", + includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, + xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? _g : interfaces_js_1.XML_CHARKEY, + }, + }; + return { + name: exports.deserializationPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + }, + }; +} +exports.deserializationPolicy = deserializationPolicy; +function getOperationResponseMap(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (operationSpec) { + if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const request = parsedResponse.request; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (deserializeError) { + const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); + } + } + return parsedResponse; +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? 
responseSpec : operationSpec.responses.default; + const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new core_rest_pipeline_1.RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); + } + const internalError = parsedBody.error || deserializedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = deserializedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = + 
operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + var _a; + if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && + operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + try { + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } + } + catch (err) { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR; + const e = new core_rest_pipeline_1.RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse, + }); + throw e; + } + } + return operationResponse; +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/httpClientCache.js 
b/node_modules/@azure/core-client/dist/commonjs/httpClientCache.js new file mode 100644 index 000000000..317a43d06 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/httpClientCache.js @@ -0,0 +1,15 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getCachedDefaultHttpClient = void 0; +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); + } + return cachedHttpClient; +} +exports.getCachedDefaultHttpClient = getCachedDefaultHttpClient; +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/index.js b/node_modules/@azure/core-client/dist/commonjs/index.js new file mode 100644 index 000000000..219e25ac9 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/index.js @@ -0,0 +1,26 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.authorizeRequestOnTenantChallenge = exports.authorizeRequestOnClaimChallenge = exports.serializationPolicyName = exports.serializationPolicy = exports.deserializationPolicyName = exports.deserializationPolicy = exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.createClientPipeline = exports.ServiceClient = exports.MapperTypeNames = exports.createSerializer = void 0; +var serializer_js_1 = require("./serializer.js"); +Object.defineProperty(exports, "createSerializer", { enumerable: true, get: function () { return serializer_js_1.createSerializer; } }); +Object.defineProperty(exports, "MapperTypeNames", { enumerable: true, get: function () { return serializer_js_1.MapperTypeNames; } }); +var serviceClient_js_1 = require("./serviceClient.js"); +Object.defineProperty(exports, "ServiceClient", { enumerable: true, get: function () { return serviceClient_js_1.ServiceClient; } }); +var pipeline_js_1 = require("./pipeline.js"); +Object.defineProperty(exports, "createClientPipeline", { enumerable: true, get: function () { return pipeline_js_1.createClientPipeline; } }); +var interfaces_js_1 = require("./interfaces.js"); +Object.defineProperty(exports, "XML_ATTRKEY", { enumerable: true, get: function () { return interfaces_js_1.XML_ATTRKEY; } }); +Object.defineProperty(exports, "XML_CHARKEY", { enumerable: true, get: function () { return interfaces_js_1.XML_CHARKEY; } }); +var deserializationPolicy_js_1 = require("./deserializationPolicy.js"); +Object.defineProperty(exports, "deserializationPolicy", { enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicy; } }); +Object.defineProperty(exports, "deserializationPolicyName", { enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicyName; } }); +var serializationPolicy_js_1 = require("./serializationPolicy.js"); +Object.defineProperty(exports, "serializationPolicy", { enumerable: 
true, get: function () { return serializationPolicy_js_1.serializationPolicy; } }); +Object.defineProperty(exports, "serializationPolicyName", { enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicyName; } }); +var authorizeRequestOnClaimChallenge_js_1 = require("./authorizeRequestOnClaimChallenge.js"); +Object.defineProperty(exports, "authorizeRequestOnClaimChallenge", { enumerable: true, get: function () { return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; } }); +var authorizeRequestOnTenantChallenge_js_1 = require("./authorizeRequestOnTenantChallenge.js"); +Object.defineProperty(exports, "authorizeRequestOnTenantChallenge", { enumerable: true, get: function () { return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js b/node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js new file mode 100644 index 000000000..3216a12c1 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/interfaceHelpers.js @@ -0,0 +1,44 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getPathStringFromParameter = exports.getStreamingResponseStatusCodes = void 0; +const serializer_js_1 = require("./serializer.js"); +/** + * Gets the list of status codes for streaming responses. 
+ * @internal + */ +function getStreamingResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +exports.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + * @internal + */ +function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +exports.getPathStringFromParameter = getPathStringFromParameter; +//# sourceMappingURL=interfaceHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/interfaces.js b/node_modules/@azure/core-client/dist/commonjs/interfaces.js new file mode 100644 index 000000000..d05feb429 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/interfaces.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; +/** + * Default key used to access the XML attributes. + */ +exports.XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +exports.XML_CHARKEY = "_"; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/log.js b/node_modules/@azure/core-client/dist/commonjs/log.js new file mode 100644 index 000000000..3af3ec6a1 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/log.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.logger = void 0; +const logger_1 = require("@azure/logger"); +exports.logger = (0, logger_1.createClientLogger)("core-client"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/operationHelpers.js b/node_modules/@azure/core-client/dist/commonjs/operationHelpers.js new file mode 100644 index 000000000..ceaaab9e3 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/operationHelpers.js @@ -0,0 +1,99 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getOperationRequestInfo = exports.getOperationArgumentValueFromParameter = void 0; +const state_js_1 = require("./state.js"); +/** + * @internal + * Retrieves the value to use for a given operation argument + * @param operationArguments - The arguments passed from the generated client + * @param parameter - The parameter description + * @param fallbackObject - If something isn't found in the arguments bag, look here. + * Generally used to look at the service client properties. 
+ */ +function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper, + }, fallbackObject); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +exports.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +const originalRequestSymbol = Symbol.for("@azure/core-client original request"); +function hasOriginalRequest(request) { + return originalRequestSymbol in request; +} +function getOperationRequestInfo(request) { + if (hasOriginalRequest(request)) { + return getOperationRequestInfo(request[originalRequestSymbol]); + } + let info = state_js_1.state.operationRequestMap.get(request); + if (!info) { + info = {}; + state_js_1.state.operationRequestMap.set(request, info); + } + return info; +} +exports.getOperationRequestInfo = getOperationRequestInfo; +//# sourceMappingURL=operationHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/package.json b/node_modules/@azure/core-client/dist/commonjs/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-client/dist/commonjs/pipeline.js b/node_modules/@azure/core-client/dist/commonjs/pipeline.js new file mode 100644 index 000000000..195938b22 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/pipeline.js @@ -0,0 +1,30 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createClientPipeline = void 0; +const deserializationPolicy_js_1 = require("./deserializationPolicy.js"); +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +const serializationPolicy_js_1 = require("./serializationPolicy.js"); +/** + * Creates a new Pipeline for use with a Service Client. + * Adds in deserializationPolicy by default. 
+ * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. + * @param options - Options to customize the created pipeline. + */ +function createClientPipeline(options = {}) { + const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options !== null && options !== void 0 ? options : {}); + if (options.credentialOptions) { + pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes, + })); + } + pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), { + phase: "Deserialize", + }); + return pipeline; +} +exports.createClientPipeline = createClientPipeline; +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js b/node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js new file mode 100644 index 000000000..3b4c75b3a --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/serializationPolicy.js @@ -0,0 +1,159 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.serializeRequestBody = exports.serializeHeaders = exports.serializationPolicy = exports.serializationPolicyName = void 0; +const interfaces_js_1 = require("./interfaces.js"); +const operationHelpers_js_1 = require("./operationHelpers.js"); +const serializer_js_1 = require("./serializer.js"); +const interfaceHelpers_js_1 = require("./interfaceHelpers.js"); +/** + * The programmatic identifier of the serializationPolicy. 
+ */ +exports.serializationPolicyName = "serializationPolicy"; +/** + * This policy handles assembling the request body and headers using + * an OperationSpec and OperationArguments on the request. + */ +function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: exports.serializationPolicyName, + async sendRequest(request, next) { + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request, operationArguments, operationSpec); + serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); + } + return next(request); + }, + }; +} +exports.serializationPolicy = serializationPolicy; +/** + * @internal + */ +function serializeHeaders(request, operationArguments, operationSpec) { + var _a, _b; + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); + if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + request.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue); + } + } + 
} + } + const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request.headers.set(customHeaderName, customHeaders[customHeaderName]); + } + } +} +exports.serializeHeaders = serializeHeaders; +/** + * @internal + */ +function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { + throw new Error("XML serialization unsupported!"); +}) { + var _a, _b, _c, _d, _e; + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", + includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, + xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? 
_d : interfaces_js_1.XML_CHARKEY, + }, + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((request.body !== undefined && request.body !== null) || + (nullable && request.body === null) || + required) { + const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody); + request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === serializer_js_1.MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); + if (typeName === serializer_js_1.MapperTypeNames.Sequence) { + request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } + else if (!isStream) { + request.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === serializer_js_1.MapperTypeNames.String && + (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. 
+ return; + } + else if (!isStream) { + request.body = JSON.stringify(request.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter); + request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions); + } + } + } +} +exports.serializeRequestBody = serializeRequestBody; +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { 
[elementName]: obj }; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +//# sourceMappingURL=serializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/serializer.js b/node_modules/@azure/core-client/dist/commonjs/serializer.js new file mode 100644 index 000000000..f826ec35a --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/serializer.js @@ -0,0 +1,927 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.MapperTypeNames = exports.createSerializer = void 0; +const tslib_1 = require("tslib"); +const base64 = tslib_1.__importStar(require("./base64.js")); +const interfaces_js_1 = require("./interfaces.js"); +const utils_js_1 = require("./utils.js"); +class SerializerImpl { + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== undefined && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + var _a, _b, _c; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, + }, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === undefined || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === undefined || object === null) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + 
/** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param responseBody - A valid Javascript entity to be deserialized + * + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object + */ + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + var _a, _b, _c, _d; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, + }, + ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, + }; + if (responseBody === undefined || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (responseBody[interfaces_js_1.XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = 
deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +/** + * Method that creates and returns a Serializer. + * @param modelMappers - Known models to map + * @param isXML - If XML should be supported + */ +function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); +} +exports.createSerializer = createSerializer; +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob 
=== "function" || typeof Blob === "object") && value instanceof Blob) && + objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value !== undefined && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!(0, utils_js_1.isDuration)(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a; + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. 
+ if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== undefined && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === undefined || childObject === null) && + ((object[key] !== undefined && object[key] !== null) || + propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; 
+ } + } + if (parentObject !== undefined && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? `xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[interfaces_js_1.XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[interfaces_js_1.XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + (toSerialize === undefined || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName !== undefined && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {}; + parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[interfaces_js_1.XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? 
_a : interfaces_js_1.XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of 
wrapped by + For the xml example below + + ... + ... + + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + // only accept null when reaching the last position of object otherwise it would be undefined + if (res === null && steps < paths.length) { + res = undefined; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. 
+ // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. + if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + (propertyInstance === undefined || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + 
instance[key] = responseBody[key]; + } + } + } + return instance; +} +function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + /* jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + var _a; + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (element.type.name === "Composite" && element.type.className) { + element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? 
_a : element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName + ? discriminatorValue + : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } + else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && + mapper.type.uberParent === currentName && + mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } + } + } + } + return undefined; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var _a; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + // The serializedName might have \\, which we just want to ignore + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? 
_a : mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Known types of Mappers + */ +exports.MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime", +}; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/serviceClient.js b/node_modules/@azure/core-client/dist/commonjs/serviceClient.js new file mode 100644 index 000000000..537409997 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/serviceClient.js @@ -0,0 +1,153 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.ServiceClient = void 0; +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +const pipeline_js_1 = require("./pipeline.js"); +const utils_js_1 = require("./utils.js"); +const httpClientCache_js_1 = require("./httpClientCache.js"); +const operationHelpers_js_1 = require("./operationHelpers.js"); +const urlHelpers_js_1 = require("./urlHelpers.js"); +const interfaceHelpers_js_1 = require("./interfaceHelpers.js"); +const log_js_1 = require("./log.js"); +/** + * Initializes a new instance of the ServiceClient. + */ +class ServiceClient { + /** + * The ServiceClient constructor + * @param credential - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + var _a, _b; + this._requestContentType = options.requestContentType; + this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; + if (options.baseUri) { + log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { + for (const { policy, position } of options.additionalPolicies) { + // Sign happens after Retry and is commonly needed to occur + // before policies that intercept post-retry. + const afterPhase = position === "perRetry" ? "Sign" : undefined; + this.pipeline.addPolicy(policy, { + afterPhase, + }); + } + } + } + /** + * Send the provided httpRequest. 
+ */ + async sendRequest(request) { + return this.pipeline.sendRequest(this._httpClient, request); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. + * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint = operationSpec.baseUrl || this._endpoint; + if (!endpoint) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); + } + // Templatized URLs sometimes reference properties on the ServiceClient child class, + // so we have to pass `this` below in order to search these properties if they're + // not part of OperationArguments + const url = (0, urlHelpers_js_1.getRequestUrl)(endpoint, operationSpec, operationArguments, this); + const request = (0, core_rest_pipeline_1.createPipelineRequest)({ + url, + }); + request.method = operationSpec.httpMethod; + const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request.onDownloadProgress = 
requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== undefined) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request.allowInsecureConnection = true; + } + } + if (options.abortSignal) { + request.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request.allowInsecureConnection = true; + } + if (request.streamResponseStatusCodes === undefined) { + request.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request); + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } + catch (error) { + if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { + const rawResponse = error.response; + const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); + error.details = flatResponse; + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse, error); + } + } + throw error; + } + } +} +exports.ServiceClient = ServiceClient; +function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes + ? 
{ credentialScopes, credential: options.credential } + : undefined; + return (0, pipeline_js_1.createClientPipeline)(Object.assign(Object.assign({}, options), { credentialOptions })); +} +function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; + } + if (options.endpoint) { + return `${options.endpoint}/.default`; + } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. Unable to create a bearerTokenAuthenticationPolicy`); + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/state.js b/node_modules/@azure/core-client/dist/commonjs/state.js new file mode 100644 index 000000000..70bbe470b --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/state.js @@ -0,0 +1,12 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.state = void 0; +/** + * Holds the singleton operationRequestMap, to be shared across CJS and ESM imports. + */ +exports.state = { + operationRequestMap: new WeakMap(), +}; +//# sourceMappingURL=state-cjs.cjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-client/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 000000000..6305f1798 --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.43.1" + } + ] +} diff --git a/node_modules/@azure/core-client/dist/commonjs/urlHelpers.js b/node_modules/@azure/core-client/dist/commonjs/urlHelpers.js new file mode 100644 index 000000000..3d36fb18f --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/urlHelpers.js @@ -0,0 +1,240 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.appendQueryParams = exports.getRequestUrl = void 0; +const operationHelpers_js_1 = require("./operationHelpers.js"); +const interfaceHelpers_js_1 = require("./interfaceHelpers.js"); +const CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: "\t", + Pipes: "|", +}; +function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path = replaceAll(operationSpec.path, urlReplacements); + // QUIRK: sometimes we get a path component like /{nextLink} + // which may be a fully formed URL with a leading /. In that case, we should + // remove the leading / + if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { + path = path.substring(1); + } + // QUIRK: sometimes we get a path component like {nextLink} + // which may be a fully formed URL. In that case, we should + // ignore the baseUri. 
+ if (isAbsoluteUrl(path)) { + requestUrl = path; + isAbsolutePath = true; + } + else { + requestUrl = appendPath(requestUrl, path); + } + } + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + /** + * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` + * is an absolute path. This ensures that existing query parameter values in `requestUrl` + * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it + * is still being built so there is nothing to overwrite. + */ + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; +} +exports.getRequestUrl = getRequestUrl; +function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); + } + return result; +} +function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? 
void 0 : _a.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); + const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); + } + } + return result; +} +function isAbsoluteUrl(url) { + return url.includes("://"); +} +function appendPath(url, pathToAppend) { + if (!pathToAppend) { + return url; + } + const parsedUrl = new URL(url); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; + } + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); + } + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path; + if (search) { + parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; + } + } + else { + newPath = newPath + pathToAppend; + } + parsedUrl.pathname = newPath; + return parsedUrl.toString(); +} +function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + const sequenceParams = new Set(); + if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? 
void 0 : _a.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); + } + let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); + if ((queryParameterValue !== undefined && queryParameterValue !== null) || + queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter)); + const delimiter = queryParameter.collectionFormat + ? CollectionFormatToDelimiterMap[queryParameter.collectionFormat] + : ""; + if (Array.isArray(queryParameterValue)) { + // replace null and undefined + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === undefined) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } + else if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + // Join pipes and CSV *after* encoding, or the server will be upset. 
+ if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue); + } + } + } + return { + queryParams: result, + sequenceParams, + }; +} +function simpleParseQueryParams(queryString) { + const result = new Map(); + if (!queryString || queryString[0] !== "?") { + return result; + } + // remove the leading ? + queryString = queryString.slice(1); + const pairs = queryString.split("&"); + for (const pair of pairs) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } + else { + result.set(name, [existingValue, value]); + } + } + else { + result.set(name, value); + } + } + return result; +} +/** @internal */ +function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url; + } + const parsedUrl = new URL(url); + // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which + // can change their meaning to the server, such as in the case of a SAS signature. 
+ // To avoid accidentally un-encoding a query param, we parse the key/values ourselves + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } + else { + existingValue.push(value); + } + } + else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } + else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } + } + else { + combinedParams.set(name, value); + } + } + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } + else if (Array.isArray(value)) { + // QUIRK: If we get an array of values, include multiple key/value pairs + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } + else { + searchPieces.push(`${name}=${value}`); + } + } + // QUIRK: we have to set search manually as searchParams will encode comma when it shouldn't. + parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); +} +exports.appendQueryParams = appendQueryParams; +//# sourceMappingURL=urlHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/commonjs/utils.js b/node_modules/@azure/core-client/dist/commonjs/utils.js new file mode 100644 index 000000000..80c5ad75e --- /dev/null +++ b/node_modules/@azure/core-client/dist/commonjs/utils.js @@ -0,0 +1,122 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.flattenResponse = exports.isValidUuid = exports.isDuration = exports.isPrimitiveBody = void 0; +/** + * A type guard for a primitive response body. + * @param value - Value to test + * + * @internal + */ +function isPrimitiveBody(value, mapperTypeName) { + return (mapperTypeName !== "Composite" && + mapperTypeName !== "Dictionary" && + (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== + null || + value === undefined || + value === null)); +} +exports.isPrimitiveBody = isPrimitiveBody; +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Returns true if the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @internal + */ +function isDuration(value) { + return validateISODuration.test(value); +} +exports.isDuration = isDuration; +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Returns true if the provided uuid is valid. + * + * @param uuid - The uuid that needs to be validated. + * + * @internal + */ +function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +exports.isValidUuid = isValidUuid; +/** + * Maps the response as follows: + * - wraps the response body if needed (typically if its type is primitive). + * - returns null if the combination of the headers and the body is empty. + * - otherwise, returns the combination of the headers and the body. 
+ * + * @param responseObject - a representation of the parsed response + * @returns the response that will be returned to the user which can be null and/or wrapped + * + * @internal + */ +function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); + if (responseObject.hasNullableType && + Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; + } + else { + return responseObject.shouldWrapBody + ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; + } +} +/** + * Take a `FullOperationResponse` and turn it into a flat + * response object to hand back to the consumer. + * @param fullResponse - The processed response from the operation request + * @param responseSpec - The response map from the OperationSpec + * + * @internal + */ +function flattenResponse(fullResponse, responseSpec) { + var _a, _b; + const parsedHeaders = fullResponse.parsedHeaders; + // head methods never have a body, but we return a boolean set to body property + // to indicate presence/absence of the resource + if (fullResponse.request.method === "HEAD") { + return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); + } + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable); + const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? 
void 0 : bodyMapper.type.name; + /** If the body is asked for, we look at the expected body type to handle it */ + if (expectedBodyTypeName === "Stream") { + return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); + } + const modelProperties = (expectedBodyTypeName === "Composite" && + bodyMapper.type.modelProperties) || + {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + return isNullable && + !fullResponse.parsedBody && + !parsedHeaders && + Object.getOwnPropertyNames(modelProperties).length === 0 + ? null + : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), + }); +} +exports.flattenResponse = flattenResponse; +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/authorizeRequestOnClaimChallenge.js b/node_modules/@azure/core-client/dist/esm/authorizeRequestOnClaimChallenge.js new file mode 100644 index 000000000..9de3e8b83 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/authorizeRequestOnClaimChallenge.js @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { logger as coreClientLogger } from "./log.js"; +import { decodeStringToString } from "./base64.js"; +/** + * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. + * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. + * + * @internal + */ +export function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); + }); +} +/** + * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: + * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). + * + * Call the `bearerTokenAuthenticationPolicy` with the following options: + * + * ```ts + * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; + * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; + * + * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ + * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge + * }); + * ``` + * + * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. + * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. 
+ * + * Example challenge with claims: + * + * ``` + * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", + * error_description="User session has been revoked", + * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" + * ``` + */ +export async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || coreClientLogger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: decodeStringToString(parsedChallenge.claims), + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + return true; +} +//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/authorizeRequestOnTenantChallenge.js b/node_modules/@azure/core-client/dist/esm/authorizeRequestOnTenantChallenge.js new file mode 100644 index 000000000..0943fcede --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/authorizeRequestOnTenantChallenge.js @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. 
+ */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); +} +/** + * Defines a callback to handle auth challenge for Storage APIs. + * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge + * Handling has specific features for storage that departs to the general AAD challenge docs. + **/ +export const authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); + return true; + } + return false; +}; +/** + * Extracts the tenant id from the challenge information + * The tenant id is contained in the authorization_uri as the first + * path part. 
+ */ +function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return undefined; +} +/** + * Builds the authentication scopes based on the information that comes in the + * challenge information. Scopes url is present in the resource_id, if it is empty + * we keep using the original scopes. + */ +function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + // the extra slash is required by the service + scope = "https://disk.azure.com//.default"; + } + return [scope]; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. 
+ * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +/** + * Extracts the options form a Pipeline Request for later re-use + */ +function requestToOptions(request) { + return { + abortSignal: request.abortSignal, + requestOptions: { + timeout: request.timeout, + }, + tracingOptions: request.tracingOptions, + }; +} +//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/base64.js b/node_modules/@azure/core-client/dist/esm/base64.js new file mode 100644 index 000000000..f1d3e1e7c --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/base64.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - the string to encode + * @internal + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - the Uint8Aray to encode + * @internal + */ +export function encodeByteArray(value) { + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value - the base64 string to decode + * @internal + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +/** + * Decodes a base64 string into a string. 
+ * @param value - the base64 string to decode + * @internal + */ +export function decodeStringToString(value) { + return Buffer.from(value, "base64").toString(); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/deserializationPolicy.js b/node_modules/@azure/core-client/dist/esm/deserializationPolicy.js new file mode 100644 index 000000000..bdda3cd33 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/deserializationPolicy.js @@ -0,0 +1,231 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_CHARKEY, } from "./interfaces.js"; +import { RestError, } from "@azure/core-rest-pipeline"; +import { MapperTypeNames } from "./serializer.js"; +import { getOperationRequestInfo } from "./operationHelpers.js"; +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * The programmatic identifier of the deserializationPolicy. + */ +export const deserializationPolicyName = "deserializationPolicy"; +/** + * This policy handles parsing out responses according to OperationSpecs on the request. + */ +export function deserializationPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes; + const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? 
_e : "", + includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, + xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? _g : XML_CHARKEY, + }, + }; + return { + name: deserializationPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + }, + }; +} +function getOperationResponseMap(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationInfo = getOperationRequestInfo(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (operationSpec) { + if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const request = parsedResponse.request; + const operationInfo = getOperationRequestInfo(request); + const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = getOperationRequestInfo(parsedResponse.request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperTypeNames.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (deserializeError) { + const restError = new RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); + } + } + return parsedResponse; +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? 
responseSpec : operationSpec.responses.default; + const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); + } + const internalError = parsedBody.error || deserializedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = deserializedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = + 
operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + var _a; + if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && + operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + try { + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } + } + catch (err) { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse, + }); + throw e; + } + } + return operationResponse; +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/httpClientCache.js b/node_modules/@azure/core-client/dist/esm/httpClientCache.js 
new file mode 100644 index 000000000..ad48a498c --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/httpClientCache.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createDefaultHttpClient } from "@azure/core-rest-pipeline"; +let cachedHttpClient; +export function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = createDefaultHttpClient(); + } + return cachedHttpClient; +} +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/index.js b/node_modules/@azure/core-client/dist/esm/index.js new file mode 100644 index 000000000..3f91b1c44 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/index.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { createSerializer, MapperTypeNames } from "./serializer.js"; +export { ServiceClient } from "./serviceClient.js"; +export { createClientPipeline } from "./pipeline.js"; +export { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +export { deserializationPolicy, deserializationPolicyName, } from "./deserializationPolicy.js"; +export { serializationPolicy, serializationPolicyName, } from "./serializationPolicy.js"; +export { authorizeRequestOnClaimChallenge } from "./authorizeRequestOnClaimChallenge.js"; +export { authorizeRequestOnTenantChallenge } from "./authorizeRequestOnTenantChallenge.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/interfaceHelpers.js b/node_modules/@azure/core-client/dist/esm/interfaceHelpers.js new file mode 100644 index 000000000..80b8905e6 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/interfaceHelpers.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { MapperTypeNames } from "./serializer.js"; +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export function getStreamingResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + * @internal + */ +export function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=interfaceHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/interfaces.js b/node_modules/@azure/core-client/dist/esm/interfaces.js new file mode 100644 index 000000000..6a3bc345f --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/interfaces.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/log.js b/node_modules/@azure/core-client/dist/esm/log.js new file mode 100644 index 000000000..9caaedac8 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-client"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/operationHelpers.js b/node_modules/@azure/core-client/dist/esm/operationHelpers.js new file mode 100644 index 000000000..942ba360f --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/operationHelpers.js @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { state } from "./state.js"; +/** + * @internal + * Retrieves the value to use for a given operation argument + * @param operationArguments - The arguments passed from the generated client + * @param parameter - The parameter description + * @param fallbackObject - If something isn't found in the arguments bag, look here. + * Generally used to look at the service client properties. 
+ */ +export function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper, + }, fallbackObject); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +const originalRequestSymbol = Symbol.for("@azure/core-client original request"); +function hasOriginalRequest(request) { + return originalRequestSymbol in request; +} +export function getOperationRequestInfo(request) { + if (hasOriginalRequest(request)) { + return getOperationRequestInfo(request[originalRequestSymbol]); + } + let info = state.operationRequestMap.get(request); + if (!info) { + info = {}; + state.operationRequestMap.set(request, info); + } + return info; +} +//# sourceMappingURL=operationHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/package.json b/node_modules/@azure/core-client/dist/esm/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-client/dist/esm/pipeline.js b/node_modules/@azure/core-client/dist/esm/pipeline.js new file mode 100644 index 000000000..b320ce619 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/pipeline.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { deserializationPolicy } from "./deserializationPolicy.js"; +import { bearerTokenAuthenticationPolicy, createPipelineFromOptions, } from "@azure/core-rest-pipeline"; +import { serializationPolicy } from "./serializationPolicy.js"; +/** + * Creates a new Pipeline for use with a Service Client. + * Adds in deserializationPolicy by default. + * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. + * @param options - Options to customize the created pipeline. 
+ */ +export function createClientPipeline(options = {}) { + const pipeline = createPipelineFromOptions(options !== null && options !== void 0 ? options : {}); + if (options.credentialOptions) { + pipeline.addPolicy(bearerTokenAuthenticationPolicy({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes, + })); + } + pipeline.addPolicy(serializationPolicy(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy(deserializationPolicy(options.deserializationOptions), { + phase: "Deserialize", + }); + return pipeline; +} +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/serializationPolicy.js b/node_modules/@azure/core-client/dist/esm/serializationPolicy.js new file mode 100644 index 000000000..a75aceba7 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/serializationPolicy.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +import { getOperationArgumentValueFromParameter, getOperationRequestInfo, } from "./operationHelpers.js"; +import { MapperTypeNames } from "./serializer.js"; +import { getPathStringFromParameter } from "./interfaceHelpers.js"; +/** + * The programmatic identifier of the serializationPolicy. + */ +export const serializationPolicyName = "serializationPolicy"; +/** + * This policy handles assembling the request body and headers using + * an OperationSpec and OperationArguments on the request. + */ +export function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: serializationPolicyName, + async sendRequest(request, next) { + const operationInfo = getOperationRequestInfo(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.operationSpec; + const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request, operationArguments, operationSpec); + serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); + } + return next(request); + }, + }; +} +/** + * @internal + */ +export function serializeHeaders(request, operationArguments, operationSpec) { + var _a, _b; + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(operationArguments, headerParameter); + if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + request.headers.set(headerParameter.mapper.serializedName || getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request.headers.set(customHeaderName, customHeaders[customHeaderName]); + } + } +} +/** + * @internal + */ +export function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { + throw new Error("XML serialization unsupported!"); +}) { + var _a, _b, _c, _d, _e; + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", + includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, + xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? _d : XML_CHARKEY, + }, + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request.body = getOperationArgumentValueFromParameter(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((request.body !== undefined && request.body !== null) || + (nullable && request.body === null) || + required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); + if (typeName === MapperTypeNames.Sequence) { + request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } + else if (!isStream) { + request.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperTypeNames.String && + (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + request.body = JSON.stringify(request.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(operationArguments, formDataParameter); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) 
{ + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +//# sourceMappingURL=serializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/serializer.js b/node_modules/@azure/core-client/dist/esm/serializer.js new file mode 100644 index 000000000..9dc240521 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/serializer.js @@ -0,0 +1,922 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as base64 from "./base64.js"; +import { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +import { isDuration, isValidUuid } from "./utils.js"; +class SerializerImpl { + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== undefined && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + var _a, _b, _c; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === undefined || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === undefined || object === null) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + 
/** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param responseBody - A valid Javascript entity to be deserialized + * + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object + */ + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + var _a, _b, _c, _d; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }, + ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, + }; + if (responseBody === undefined || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (responseBody[XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = 
deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +/** + * Method that creates and returns a Serializer. + * @param modelMappers - Known models to map + * @param isXML - If XML should be supported + */ +export function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob === "function" || 
typeof Blob === "object") && value instanceof Blob) && + objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value !== undefined && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a; + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. 
+ if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== undefined && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === undefined || childObject === null) && + ((object[key] !== undefined && object[key] !== null) || + propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; 
+ } + } + if (parentObject !== undefined && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? `xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + (toSerialize === undefined || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName !== undefined && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... 
+ + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + // only accept null when reaching the last position of object otherwise it would be undefined + if (res === null && steps < paths.length) { + res = undefined; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + (propertyInstance === undefined || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function 
deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + /* jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + var _a; + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (element.type.name === "Composite" && element.type.className) { + element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? _a : element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName + ? 
discriminatorValue + : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } + else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && + mapper.type.uberParent === currentName && + mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } + } + } + } + return undefined; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var _a; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + // The serializedName might have \\, which we just want to ignore + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? 
_a : mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Known types of Mappers + */ +export const MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime", +}; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/serviceClient.js b/node_modules/@azure/core-client/dist/esm/serviceClient.js new file mode 100644 index 000000000..eccd0d8cf --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/serviceClient.js @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createPipelineRequest, } from "@azure/core-rest-pipeline"; +import { createClientPipeline } from "./pipeline.js"; +import { flattenResponse } from "./utils.js"; +import { getCachedDefaultHttpClient } from "./httpClientCache.js"; +import { getOperationRequestInfo } from "./operationHelpers.js"; +import { getRequestUrl } from "./urlHelpers.js"; +import { getStreamingResponseStatusCodes } from "./interfaceHelpers.js"; +import { logger } from "./log.js"; +/** + * Initializes a new instance of the ServiceClient. + */ +export class ServiceClient { + /** + * The ServiceClient constructor + * @param credential - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + var _a, _b; + this._requestContentType = options.requestContentType; + this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; + if (options.baseUri) { + logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { + for (const { policy, position } of options.additionalPolicies) { + // Sign happens after Retry and is commonly needed to occur + // before policies that intercept post-retry. + const afterPhase = position === "perRetry" ? "Sign" : undefined; + this.pipeline.addPolicy(policy, { + afterPhase, + }); + } + } + } + /** + * Send the provided httpRequest. + */ + async sendRequest(request) { + return this.pipeline.sendRequest(this._httpClient, request); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. 
+ * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint = operationSpec.baseUrl || this._endpoint; + if (!endpoint) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); + } + // Templatized URLs sometimes reference properties on the ServiceClient child class, + // so we have to pass `this` below in order to search these properties if they're + // not part of OperationArguments + const url = getRequestUrl(endpoint, operationSpec, operationArguments, this); + const request = createPipelineRequest({ + url, + }); + request.method = operationSpec.httpMethod; + const operationInfo = getOperationRequestInfo(request); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request.onDownloadProgress = requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== undefined) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request.allowInsecureConnection = true; + } + } + if 
(options.abortSignal) { + request.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request.allowInsecureConnection = true; + } + if (request.streamResponseStatusCodes === undefined) { + request.streamResponseStatusCodes = getStreamingResponseStatusCodes(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request); + const flatResponse = flattenResponse(rawResponse, operationSpec.responses[rawResponse.status]); + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } + catch (error) { + if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { + const rawResponse = error.response; + const flatResponse = flattenResponse(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); + error.details = flatResponse; + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse, error); + } + } + throw error; + } + } +} +function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes + ? { credentialScopes, credential: options.credential } + : undefined; + return createClientPipeline(Object.assign(Object.assign({}, options), { credentialOptions })); +} +function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; + } + if (options.endpoint) { + return `${options.endpoint}/.default`; + } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. 
Unable to create a bearerTokenAuthenticationPolicy`); + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/state.js b/node_modules/@azure/core-client/dist/esm/state.js new file mode 100644 index 000000000..1699f50a9 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/state.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// @ts-expect-error The recommended approach to sharing module state between ESM and CJS. +// See https://github.com/isaacs/tshy/blob/main/README.md#module-local-state for additional information. +import { state as cjsState } from "../commonjs/state.js"; +/** + * Defines the shared state between CJS and ESM by re-exporting the CJS state. + */ +export const state = cjsState; +//# sourceMappingURL=state.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/urlHelpers.js b/node_modules/@azure/core-client/dist/esm/urlHelpers.js new file mode 100644 index 000000000..782f4ef71 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/urlHelpers.js @@ -0,0 +1,235 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { getOperationArgumentValueFromParameter } from "./operationHelpers.js"; +import { getPathStringFromParameter } from "./interfaceHelpers.js"; +const CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: "\t", + Pipes: "|", +}; +export function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path = replaceAll(operationSpec.path, urlReplacements); + // QUIRK: sometimes we get a path component like /{nextLink} + // which may be a fully formed URL with a leading /. In that case, we should + // remove the leading / + if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { + path = path.substring(1); + } + // QUIRK: sometimes we get a path component like {nextLink} + // which may be a fully formed URL. In that case, we should + // ignore the baseUri. + if (isAbsoluteUrl(path)) { + requestUrl = path; + isAbsolutePath = true; + } + else { + requestUrl = appendPath(requestUrl, path); + } + } + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + /** + * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` + * is an absolute path. This ensures that existing query parameter values in `requestUrl` + * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it + * is still being built so there is nothing to overwrite. 
+ */ + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; +} +function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); + } + return result; +} +function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(operationArguments, urlParameter, fallbackObject); + const parameterPathString = getPathStringFromParameter(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); + } + } + return result; +} +function isAbsoluteUrl(url) { + return url.includes("://"); +} +function appendPath(url, pathToAppend) { + if (!pathToAppend) { + return url; + } + const parsedUrl = new URL(url); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; + } + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); + } + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path; + if (search) { + parsedUrl.search = parsedUrl.search ? 
`${parsedUrl.search}&${search}` : search; + } + } + else { + newPath = newPath + pathToAppend; + } + parsedUrl.pathname = newPath; + return parsedUrl.toString(); +} +function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + const sequenceParams = new Set(); + if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); + } + let queryParameterValue = getOperationArgumentValueFromParameter(operationArguments, queryParameter, fallbackObject); + if ((queryParameterValue !== undefined && queryParameterValue !== null) || + queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + const delimiter = queryParameter.collectionFormat + ? 
CollectionFormatToDelimiterMap[queryParameter.collectionFormat] + : ""; + if (Array.isArray(queryParameterValue)) { + // replace null and undefined + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === undefined) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } + else if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + // Join pipes and CSV *after* encoding, or the server will be upset. + if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + return { + queryParams: result, + sequenceParams, + }; +} +function simpleParseQueryParams(queryString) { + const result = new Map(); + if (!queryString || queryString[0] !== "?") { + return result; + } + // remove the leading ? 
+ queryString = queryString.slice(1); + const pairs = queryString.split("&"); + for (const pair of pairs) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } + else { + result.set(name, [existingValue, value]); + } + } + else { + result.set(name, value); + } + } + return result; +} +/** @internal */ +export function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url; + } + const parsedUrl = new URL(url); + // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which + // can change their meaning to the server, such as in the case of a SAS signature. + // To avoid accidentally un-encoding a query param, we parse the key/values ourselves + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } + else { + existingValue.push(value); + } + } + else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } + else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } + } + else { + combinedParams.set(name, value); + } + } + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } + else if (Array.isArray(value)) { + // QUIRK: If we get an array of values, include multiple key/value pairs + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } + else { + searchPieces.push(`${name}=${value}`); + } + } + // QUIRK: we 
have to set search manually as searchParams will encode comma when it shouldn't. + parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); +} +//# sourceMappingURL=urlHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/esm/utils.js b/node_modules/@azure/core-client/dist/esm/utils.js new file mode 100644 index 000000000..7a6268631 --- /dev/null +++ b/node_modules/@azure/core-client/dist/esm/utils.js @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A type guard for a primitive response body. + * @param value - Value to test + * + * @internal + */ +export function isPrimitiveBody(value, mapperTypeName) { + return (mapperTypeName !== "Composite" && + mapperTypeName !== "Dictionary" && + (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== + null || + value === undefined || + value === null)); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Returns true if the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @internal + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Returns true if the provided uuid is valid. + * + * @param uuid - The uuid that needs to be validated. 
+ * + * @internal + */ +export function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Maps the response as follows: + * - wraps the response body if needed (typically if its type is primitive). + * - returns null if the combination of the headers and the body is empty. + * - otherwise, returns the combination of the headers and the body. + * + * @param responseObject - a representation of the parsed response + * @returns the response that will be returned to the user which can be null and/or wrapped + * + * @internal + */ +function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); + if (responseObject.hasNullableType && + Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; + } + else { + return responseObject.shouldWrapBody + ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; + } +} +/** + * Take a `FullOperationResponse` and turn it into a flat + * response object to hand back to the consumer. + * @param fullResponse - The processed response from the operation request + * @param responseSpec - The response map from the OperationSpec + * + * @internal + */ +export function flattenResponse(fullResponse, responseSpec) { + var _a, _b; + const parsedHeaders = fullResponse.parsedHeaders; + // head methods never have a body, but we return a boolean set to body property + // to indicate presence/absence of the resource + if (fullResponse.request.method === "HEAD") { + return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); + } + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? 
void 0 : bodyMapper.nullable); + const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.type.name; + /** If the body is asked for, we look at the expected body type to handle it */ + if (expectedBodyTypeName === "Stream") { + return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); + } + const modelProperties = (expectedBodyTypeName === "Composite" && + bodyMapper.type.modelProperties) || + {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + return isNullable && + !fullResponse.parsedBody && + !parsedHeaders && + Object.getOwnPropertyNames(modelProperties).length === 0 + ? null + : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), + }); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/authorizeRequestOnClaimChallenge.js b/node_modules/@azure/core-client/dist/react-native/authorizeRequestOnClaimChallenge.js new file mode 100644 index 000000000..9de3e8b83 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/authorizeRequestOnClaimChallenge.js @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { logger as coreClientLogger } from "./log.js"; +import { decodeStringToString } from "./base64.js"; +/** + * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. + * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. + * + * @internal + */ +export function parseCAEChallenge(challenges) { + const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); + return bearerChallenges.map((challenge) => { + const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); + }); +} +/** + * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: + * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). + * + * Call the `bearerTokenAuthenticationPolicy` with the following options: + * + * ```ts + * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; + * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; + * + * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ + * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge + * }); + * ``` + * + * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. + * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. 
+ * + * Example challenge with claims: + * + * ``` + * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", + * error_description="User session has been revoked", + * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" + * ``` + */ +export async function authorizeRequestOnClaimChallenge(onChallengeOptions) { + const { scopes, response } = onChallengeOptions; + const logger = onChallengeOptions.logger || coreClientLogger; + const challenge = response.headers.get("WWW-Authenticate"); + if (!challenge) { + logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const challenges = parseCAEChallenge(challenge) || []; + const parsedChallenge = challenges.find((x) => x.claims); + if (!parsedChallenge) { + logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); + return false; + } + const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { + claims: decodeStringToString(parsedChallenge.claims), + }); + if (!accessToken) { + return false; + } + onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + return true; +} +//# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/authorizeRequestOnTenantChallenge.js b/node_modules/@azure/core-client/dist/react-native/authorizeRequestOnTenantChallenge.js new file mode 100644 index 000000000..0943fcede --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/authorizeRequestOnTenantChallenge.js @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. 
+ */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. + */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +function isUuid(text) { + return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); +} +/** + * Defines a callback to handle auth challenge for Storage APIs. + * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge + * Handling has specific features for storage that departs to the general AAD challenge docs. + **/ +export const authorizeRequestOnTenantChallenge = async (challengeOptions) => { + const requestOptions = requestToOptions(challengeOptions.request); + const challenge = getChallenge(challengeOptions.response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = buildScopes(challengeOptions, challengeInfo); + const tenantId = extractTenantId(challengeInfo); + if (!tenantId) { + return false; + } + const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); + if (!accessToken) { + return false; + } + challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); + return true; + } + return false; +}; +/** + * Extracts the tenant id from the challenge information + * The tenant id is contained in the authorization_uri as the first + * path part. 
+ */ +function extractTenantId(challengeInfo) { + const parsedAuthUri = new URL(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.pathname.split("/"); + const tenantId = pathSegments[1]; + if (tenantId && isUuid(tenantId)) { + return tenantId; + } + return undefined; +} +/** + * Builds the authentication scopes based on the information that comes in the + * challenge information. Scopes url is present in the resource_id, if it is empty + * we keep using the original scopes. + */ +function buildScopes(challengeOptions, challengeInfo) { + if (!challengeInfo.resource_id) { + return challengeOptions.scopes; + } + const challengeScopes = new URL(challengeInfo.resource_id); + challengeScopes.pathname = Constants.DefaultScope; + let scope = challengeScopes.toString(); + if (scope === "https://disk.azure.com/.default") { + // the extra slash is required by the service + scope = "https://disk.azure.com//.default"; + } + return [scope]; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. 
+ * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +/** + * Extracts the options form a Pipeline Request for later re-use + */ +function requestToOptions(request) { + return { + abortSignal: request.abortSignal, + requestOptions: { + timeout: request.timeout, + }, + tracingOptions: request.tracingOptions, + }; +} +//# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/base64.js b/node_modules/@azure/core-client/dist/react-native/base64.js new file mode 100644 index 000000000..f1d3e1e7c --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/base64.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Encodes a string in base64 format. + * @param value - the string to encode + * @internal + */ +export function encodeString(value) { + return Buffer.from(value).toString("base64"); +} +/** + * Encodes a byte array in base64 format. + * @param value - the Uint8Aray to encode + * @internal + */ +export function encodeByteArray(value) { + const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); + return bufferValue.toString("base64"); +} +/** + * Decodes a base64 string into a byte array. + * @param value - the base64 string to decode + * @internal + */ +export function decodeString(value) { + return Buffer.from(value, "base64"); +} +/** + * Decodes a base64 string into a string. 
+ * @param value - the base64 string to decode + * @internal + */ +export function decodeStringToString(value) { + return Buffer.from(value, "base64").toString(); +} +//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/deserializationPolicy.js b/node_modules/@azure/core-client/dist/react-native/deserializationPolicy.js new file mode 100644 index 000000000..bdda3cd33 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/deserializationPolicy.js @@ -0,0 +1,231 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_CHARKEY, } from "./interfaces.js"; +import { RestError, } from "@azure/core-rest-pipeline"; +import { MapperTypeNames } from "./serializer.js"; +import { getOperationRequestInfo } from "./operationHelpers.js"; +const defaultJsonContentTypes = ["application/json", "text/json"]; +const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; +/** + * The programmatic identifier of the deserializationPolicy. + */ +export const deserializationPolicyName = "deserializationPolicy"; +/** + * This policy handles parsing out responses according to OperationSpecs on the request. + */ +export function deserializationPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f, _g; + const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes; + const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; + const parseXML = options.parseXML; + const serializerOptions = options.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? 
_e : "", + includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, + xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? _g : XML_CHARKEY, + }, + }; + return { + name: deserializationPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); + }, + }; +} +function getOperationResponseMap(parsedResponse) { + let result; + const request = parsedResponse.request; + const operationInfo = getOperationRequestInfo(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (operationSpec) { + if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { + result = operationSpec.responses[parsedResponse.status]; + } + else { + result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); + } + } + return result; +} +function shouldDeserializeResponse(parsedResponse) { + const request = parsedResponse.request; + const operationInfo = getOperationRequestInfo(request); + const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.shouldDeserialize; + let result; + if (shouldDeserialize === undefined) { + result = true; + } + else if (typeof shouldDeserialize === "boolean") { + result = shouldDeserialize; + } + else { + result = shouldDeserialize(parsedResponse); + } + return result; +} +async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { + const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); + if (!shouldDeserializeResponse(parsedResponse)) { + return parsedResponse; + } + const operationInfo = getOperationRequestInfo(parsedResponse.request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; + if (!operationSpec || !operationSpec.responses) { + return parsedResponse; + } + const responseSpec = getOperationResponseMap(parsedResponse); + const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); + if (error) { + throw error; + } + else if (shouldReturnResponse) { + return parsedResponse; + } + // An operation response spec does exist for current status code, so + // use it to deserialize the response. + if (responseSpec) { + if (responseSpec.bodyMapper) { + let valueToDeserialize = parsedResponse.parsedBody; + if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperTypeNames.Sequence) { + valueToDeserialize = + typeof valueToDeserialize === "object" + ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] + : []; + } + try { + parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); + } + catch (deserializeError) { + const restError = new RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + throw restError; + } + } + else if (operationSpec.httpMethod === "HEAD") { + // head methods never have a body, but we return a boolean to indicate presence/absence of the resource + parsedResponse.parsedBody = response.status >= 200 && response.status < 300; + } + if (responseSpec.headersMapper) { + parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); + } + } + return parsedResponse; +} +function isOperationSpecEmpty(operationSpec) { + const expectedStatusCodes = Object.keys(operationSpec.responses); + return (expectedStatusCodes.length === 0 || + (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); +} +function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { + var _a; + const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; + const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) + ? isSuccessByStatus + : !!responseSpec; + if (isExpectedStatusCode) { + if (responseSpec) { + if (!responseSpec.isError) { + return { error: null, shouldReturnResponse: false }; + } + } + else { + return { error: null, shouldReturnResponse: false }; + } + } + const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? 
responseSpec : operationSpec.responses.default; + const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) + ? `Unexpected status code: ${parsedResponse.status}` + : parsedResponse.bodyAsText; + const error = new RestError(initialErrorMessage, { + statusCode: parsedResponse.status, + request: parsedResponse.request, + response: parsedResponse, + }); + // If the item failed but there's no error spec or default spec to deserialize the error, + // we should fail so we just throw the parsed response + if (!errorResponseSpec) { + throw error; + } + const defaultBodyMapper = errorResponseSpec.bodyMapper; + const defaultHeadersMapper = errorResponseSpec.headersMapper; + try { + // If error response has a body, try to deserialize it using default body mapper. + // Then try to extract error code & message from it + if (parsedResponse.parsedBody) { + const parsedBody = parsedResponse.parsedBody; + let deserializedError; + if (defaultBodyMapper) { + let valueToDeserialize = parsedBody; + if (operationSpec.isXML && defaultBodyMapper.type.name === MapperTypeNames.Sequence) { + valueToDeserialize = []; + const elementName = defaultBodyMapper.xmlElementName; + if (typeof parsedBody === "object" && elementName) { + valueToDeserialize = parsedBody[elementName]; + } + } + deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); + } + const internalError = parsedBody.error || deserializedError || parsedBody; + error.code = internalError.code; + if (internalError.message) { + error.message = internalError.message; + } + if (defaultBodyMapper) { + error.response.parsedBody = deserializedError; + } + } + // If error response has headers, try to deserialize it using default header mapper + if (parsedResponse.headers && defaultHeadersMapper) { + error.response.parsedHeaders = + 
operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); + } + } + catch (defaultError) { + error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; + } + return { error, shouldReturnResponse: false }; +} +async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { + var _a; + if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && + operationResponse.bodyAsText) { + const text = operationResponse.bodyAsText; + const contentType = operationResponse.headers.get("Content-Type") || ""; + const contentComponents = !contentType + ? [] + : contentType.split(";").map((component) => component.toLowerCase()); + try { + if (contentComponents.length === 0 || + contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { + operationResponse.parsedBody = JSON.parse(text); + return operationResponse; + } + else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { + if (!parseXML) { + throw new Error("Parsing XML not supported."); + } + const body = await parseXML(text, opts.xml); + operationResponse.parsedBody = body; + return operationResponse; + } + } + catch (err) { + const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; + const errCode = err.code || RestError.PARSE_ERROR; + const e = new RestError(msg, { + code: errCode, + statusCode: operationResponse.status, + request: operationResponse.request, + response: operationResponse, + }); + throw e; + } + } + return operationResponse; +} +//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/httpClientCache.js 
b/node_modules/@azure/core-client/dist/react-native/httpClientCache.js new file mode 100644 index 000000000..ad48a498c --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/httpClientCache.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createDefaultHttpClient } from "@azure/core-rest-pipeline"; +let cachedHttpClient; +export function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = createDefaultHttpClient(); + } + return cachedHttpClient; +} +//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/index.js b/node_modules/@azure/core-client/dist/react-native/index.js new file mode 100644 index 000000000..3f91b1c44 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/index.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { createSerializer, MapperTypeNames } from "./serializer.js"; +export { ServiceClient } from "./serviceClient.js"; +export { createClientPipeline } from "./pipeline.js"; +export { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +export { deserializationPolicy, deserializationPolicyName, } from "./deserializationPolicy.js"; +export { serializationPolicy, serializationPolicyName, } from "./serializationPolicy.js"; +export { authorizeRequestOnClaimChallenge } from "./authorizeRequestOnClaimChallenge.js"; +export { authorizeRequestOnTenantChallenge } from "./authorizeRequestOnTenantChallenge.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/interfaceHelpers.js b/node_modules/@azure/core-client/dist/react-native/interfaceHelpers.js new file mode 100644 index 000000000..80b8905e6 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/interfaceHelpers.js @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft 
Corporation. +// Licensed under the MIT license. +import { MapperTypeNames } from "./serializer.js"; +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +export function getStreamingResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperTypeNames.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + * @internal + */ +export function getPathStringFromParameter(parameter) { + const { parameterPath, mapper } = parameter; + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} +//# sourceMappingURL=interfaceHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/interfaces.js b/node_modules/@azure/core-client/dist/react-native/interfaces.js new file mode 100644 index 000000000..6a3bc345f --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/interfaces.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/log.js b/node_modules/@azure/core-client/dist/react-native/log.js new file mode 100644 index 000000000..9caaedac8 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-client"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/operationHelpers.js b/node_modules/@azure/core-client/dist/react-native/operationHelpers.js new file mode 100644 index 000000000..942ba360f --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/operationHelpers.js @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { state } from "./state.js"; +/** + * @internal + * Retrieves the value to use for a given operation argument + * @param operationArguments - The arguments passed from the generated client + * @param parameter - The parameter description + * @param fallbackObject - If something isn't found in the arguments bag, look here. + * Generally used to look at the service client properties. 
+ */ +export function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { + let parameterPath = parameter.parameterPath; + const parameterMapper = parameter.mapper; + let value; + if (typeof parameterPath === "string") { + parameterPath = [parameterPath]; + } + if (Array.isArray(parameterPath)) { + if (parameterPath.length > 0) { + if (parameterMapper.isConstant) { + value = parameterMapper.defaultValue; + } + else { + let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); + if (!propertySearchResult.propertyFound && fallbackObject) { + propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); + } + let useDefaultValue = false; + if (!propertySearchResult.propertyFound) { + useDefaultValue = + parameterMapper.required || + (parameterPath[0] === "options" && parameterPath.length === 2); + } + value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; + } + } + } + else { + if (parameterMapper.required) { + value = {}; + } + for (const propertyName in parameterPath) { + const propertyMapper = parameterMapper.type.modelProperties[propertyName]; + const propertyPath = parameterPath[propertyName]; + const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { + parameterPath: propertyPath, + mapper: propertyMapper, + }, fallbackObject); + if (propertyValue !== undefined) { + if (!value) { + value = {}; + } + value[propertyName] = propertyValue; + } + } + } + return value; +} +function getPropertyFromParameterPath(parent, parameterPath) { + const result = { propertyFound: false }; + let i = 0; + for (; i < parameterPath.length; ++i) { + const parameterPathPart = parameterPath[i]; + // Make sure to check inherited properties too, so don't use hasOwnProperty(). 
+ if (parent && parameterPathPart in parent) { + parent = parent[parameterPathPart]; + } + else { + break; + } + } + if (i === parameterPath.length) { + result.propertyValue = parent; + result.propertyFound = true; + } + return result; +} +const originalRequestSymbol = Symbol.for("@azure/core-client original request"); +function hasOriginalRequest(request) { + return originalRequestSymbol in request; +} +export function getOperationRequestInfo(request) { + if (hasOriginalRequest(request)) { + return getOperationRequestInfo(request[originalRequestSymbol]); + } + let info = state.operationRequestMap.get(request); + if (!info) { + info = {}; + state.operationRequestMap.set(request, info); + } + return info; +} +//# sourceMappingURL=operationHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/package.json b/node_modules/@azure/core-client/dist/react-native/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-client/dist/react-native/pipeline.js b/node_modules/@azure/core-client/dist/react-native/pipeline.js new file mode 100644 index 000000000..b320ce619 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/pipeline.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { deserializationPolicy } from "./deserializationPolicy.js"; +import { bearerTokenAuthenticationPolicy, createPipelineFromOptions, } from "@azure/core-rest-pipeline"; +import { serializationPolicy } from "./serializationPolicy.js"; +/** + * Creates a new Pipeline for use with a Service Client. + * Adds in deserializationPolicy by default. + * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. + * @param options - Options to customize the created pipeline. 
+ */ +export function createClientPipeline(options = {}) { + const pipeline = createPipelineFromOptions(options !== null && options !== void 0 ? options : {}); + if (options.credentialOptions) { + pipeline.addPolicy(bearerTokenAuthenticationPolicy({ + credential: options.credentialOptions.credential, + scopes: options.credentialOptions.credentialScopes, + })); + } + pipeline.addPolicy(serializationPolicy(options.serializationOptions), { phase: "Serialize" }); + pipeline.addPolicy(deserializationPolicy(options.deserializationOptions), { + phase: "Deserialize", + }); + return pipeline; +} +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/serializationPolicy.js b/node_modules/@azure/core-client/dist/react-native/serializationPolicy.js new file mode 100644 index 000000000..a75aceba7 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/serializationPolicy.js @@ -0,0 +1,153 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +import { getOperationArgumentValueFromParameter, getOperationRequestInfo, } from "./operationHelpers.js"; +import { MapperTypeNames } from "./serializer.js"; +import { getPathStringFromParameter } from "./interfaceHelpers.js"; +/** + * The programmatic identifier of the serializationPolicy. + */ +export const serializationPolicyName = "serializationPolicy"; +/** + * This policy handles assembling the request body and headers using + * an OperationSpec and OperationArguments on the request. + */ +export function serializationPolicy(options = {}) { + const stringifyXML = options.stringifyXML; + return { + name: serializationPolicyName, + async sendRequest(request, next) { + const operationInfo = getOperationRequestInfo(request); + const operationSpec = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.operationSpec; + const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments; + if (operationSpec && operationArguments) { + serializeHeaders(request, operationArguments, operationSpec); + serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); + } + return next(request); + }, + }; +} +/** + * @internal + */ +export function serializeHeaders(request, operationArguments, operationSpec) { + var _a, _b; + if (operationSpec.headerParameters) { + for (const headerParameter of operationSpec.headerParameters) { + let headerValue = getOperationArgumentValueFromParameter(operationArguments, headerParameter); + if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { + headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter)); + const headerCollectionPrefix = headerParameter.mapper + .headerCollectionPrefix; + if (headerCollectionPrefix) { + for (const key of Object.keys(headerValue)) { + request.headers.set(headerCollectionPrefix + key, headerValue[key]); + } + } + else { + request.headers.set(headerParameter.mapper.serializedName || getPathStringFromParameter(headerParameter), headerValue); + } + } + } + } + const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? void 0 : _b.customHeaders; + if (customHeaders) { + for (const customHeaderName of Object.keys(customHeaders)) { + request.headers.set(customHeaderName, customHeaders[customHeaderName]); + } + } +} +/** + * @internal + */ +export function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { + throw new Error("XML serialization unsupported!"); +}) { + var _a, _b, _c, _d, _e; + const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; + const updatedOptions = { + xml: { + rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", + includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, + xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? _d : XML_CHARKEY, + }, + }; + const xmlCharKey = updatedOptions.xml.xmlCharKey; + if (operationSpec.requestBody && operationSpec.requestBody.mapper) { + request.body = getOperationArgumentValueFromParameter(operationArguments, operationSpec.requestBody); + const bodyMapper = operationSpec.requestBody.mapper; + const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; + const typeName = bodyMapper.type.name; + try { + if ((request.body !== undefined && request.body !== null) || + (nullable && request.body === null) || + required) { + const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); + request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); + const isStream = typeName === MapperTypeNames.Stream; + if (operationSpec.isXML) { + const xmlnsKey = xmlNamespacePrefix ? 
`xmlns:${xmlNamespacePrefix}` : "xmlns"; + const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); + if (typeName === MapperTypeNames.Sequence) { + request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); + } + else if (!isStream) { + request.body = stringifyXML(value, { + rootName: xmlName || serializedName, + xmlCharKey, + }); + } + } + else if (typeName === MapperTypeNames.String && + (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { + // the String serializer has validated that request body is a string + // so just send the string. + return; + } + else if (!isStream) { + request.body = JSON.stringify(request.body); + } + } + } + catch (error) { + throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); + } + } + else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { + request.formData = {}; + for (const formDataParameter of operationSpec.formDataParameters) { + const formDataParameterValue = getOperationArgumentValueFromParameter(operationArguments, formDataParameter); + if (formDataParameterValue !== undefined && formDataParameterValue !== null) { + const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); + request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); + } + } + } +} +/** + * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself + */ +function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) 
{ + // Composite and Sequence schemas already got their root namespace set during serialization + // We just need to add xmlns to the other schema types + if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; + return result; + } + return serializedValue; +} +function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { + if (!Array.isArray(obj)) { + obj = [obj]; + } + if (!xmlNamespaceKey || !xmlNamespace) { + return { [elementName]: obj }; + } + const result = { [elementName]: obj }; + result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; + return result; +} +//# sourceMappingURL=serializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/serializer.js b/node_modules/@azure/core-client/dist/react-native/serializer.js new file mode 100644 index 000000000..9dc240521 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/serializer.js @@ -0,0 +1,922 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as base64 from "./base64.js"; +import { XML_ATTRKEY, XML_CHARKEY, } from "./interfaces.js"; +import { isDuration, isValidUuid } from "./utils.js"; +class SerializerImpl { + constructor(modelMappers = {}, isXML = false) { + this.modelMappers = modelMappers; + this.isXML = isXML; + } + /** + * @deprecated Removing the constraints validation on client side. 
+ */ + validateConstraints(mapper, value, objectName) { + const failValidation = (constraintName, constraintValue) => { + throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); + }; + if (mapper.constraints && value !== undefined && value !== null) { + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; + if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { + failValidation("ExclusiveMaximum", ExclusiveMaximum); + } + if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { + failValidation("ExclusiveMinimum", ExclusiveMinimum); + } + if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { + failValidation("InclusiveMaximum", InclusiveMaximum); + } + if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { + failValidation("InclusiveMinimum", InclusiveMinimum); + } + if (MaxItems !== undefined && value.length > MaxItems) { + failValidation("MaxItems", MaxItems); + } + if (MaxLength !== undefined && value.length > MaxLength) { + failValidation("MaxLength", MaxLength); + } + if (MinItems !== undefined && value.length < MinItems) { + failValidation("MinItems", MinItems); + } + if (MinLength !== undefined && value.length < MinLength) { + failValidation("MinLength", MinLength); + } + if (MultipleOf !== undefined && value % MultipleOf !== 0) { + failValidation("MultipleOf", MultipleOf); + } + if (Pattern) { + const pattern = typeof Pattern === "string" ? 
new RegExp(Pattern) : Pattern; + if (typeof value !== "string" || value.match(pattern) === null) { + failValidation("Pattern", Pattern); + } + } + if (UniqueItems && + value.some((item, i, ar) => ar.indexOf(item) !== i)) { + failValidation("UniqueItems", UniqueItems); + } + } + } + /** + * Serialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param object - A valid Javascript object to be serialized + * + * @param objectName - Name of the serialized object + * + * @param options - additional options to serialization + * + * @returns A valid serialized Javascript object + */ + serialize(mapper, object, objectName, options = { xml: {} }) { + var _a, _b, _c; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }, + }; + let payload = {}; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Sequence$/i) !== null) { + payload = []; + } + if (mapper.isConstant) { + object = mapper.defaultValue; + } + // This table of allowed values should help explain + // the mapper.required and mapper.nullable properties. + // X means "neither undefined or null are allowed". 
+ // || required + // || true | false + // nullable || ========================== + // true || null | undefined/null + // false || X | undefined + // undefined || X | undefined/null + const { required, nullable } = mapper; + if (required && nullable && object === undefined) { + throw new Error(`${objectName} cannot be undefined.`); + } + if (required && !nullable && (object === undefined || object === null)) { + throw new Error(`${objectName} cannot be null or undefined.`); + } + if (!required && nullable === false && object === null) { + throw new Error(`${objectName} cannot be null.`); + } + if (object === undefined || object === null) { + payload = object; + } + else { + if (mapperType.match(/^any$/i) !== null) { + payload = object; + } + else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { + payload = serializeBasicTypes(mapperType, objectName, object); + } + else if (mapperType.match(/^Enum$/i) !== null) { + const enumMapper = mapper; + payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); + } + else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { + payload = serializeDateTypes(mapperType, object, objectName); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = serializeByteArrayType(objectName, object); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = serializeBase64UrlType(objectName, object); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + else if (mapperType.match(/^Composite$/i) !== null) { + payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); + } + } + return payload; + } + 
/** + * Deserialize the given object based on its metadata defined in the mapper + * + * @param mapper - The mapper which defines the metadata of the serializable object + * + * @param responseBody - A valid Javascript entity to be deserialized + * + * @param objectName - Name of the deserialized object + * + * @param options - Controls behavior of XML parser and builder. + * + * @returns A valid deserialized Javascript object + */ + deserialize(mapper, responseBody, objectName, options = { xml: {} }) { + var _a, _b, _c, _d; + const updatedOptions = { + xml: { + rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + }, + ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, + }; + if (responseBody === undefined || responseBody === null) { + if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { + // Edge case for empty XML non-wrapped lists. xml2js can't distinguish + // between the list being empty versus being missing, + // so let's do the more user-friendly thing and return an empty list. 
+ responseBody = []; + } + // specifically check for undefined as default value can be a falsey value `0, "", false, null` + if (mapper.defaultValue !== undefined) { + responseBody = mapper.defaultValue; + } + return responseBody; + } + let payload; + const mapperType = mapper.type.name; + if (!objectName) { + objectName = mapper.serializedName; + } + if (mapperType.match(/^Composite$/i) !== null) { + payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); + } + else { + if (this.isXML) { + const xmlCharKey = updatedOptions.xml.xmlCharKey; + /** + * If the mapper specifies this as a non-composite type value but the responseBody contains + * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, + * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. + */ + if (responseBody[XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { + responseBody = responseBody[xmlCharKey]; + } + } + if (mapperType.match(/^Number$/i) !== null) { + payload = parseFloat(responseBody); + if (isNaN(payload)) { + payload = responseBody; + } + } + else if (mapperType.match(/^Boolean$/i) !== null) { + if (responseBody === "true") { + payload = true; + } + else if (responseBody === "false") { + payload = false; + } + else { + payload = responseBody; + } + } + else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { + payload = responseBody; + } + else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { + payload = new Date(responseBody); + } + else if (mapperType.match(/^UnixTime$/i) !== null) { + payload = unixTimeToDate(responseBody); + } + else if (mapperType.match(/^ByteArray$/i) !== null) { + payload = base64.decodeString(responseBody); + } + else if (mapperType.match(/^Base64Url$/i) !== null) { + payload = base64UrlToByteArray(responseBody); + } + else if (mapperType.match(/^Sequence$/i) !== null) { + payload = 
deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); + } + else if (mapperType.match(/^Dictionary$/i) !== null) { + payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); + } + } + if (mapper.isConstant) { + payload = mapper.defaultValue; + } + return payload; + } +} +/** + * Method that creates and returns a Serializer. + * @param modelMappers - Known models to map + * @param isXML - If XML should be supported + */ +export function createSerializer(modelMappers = {}, isXML = false) { + return new SerializerImpl(modelMappers, isXML); +} +function trimEnd(str, ch) { + let len = str.length; + while (len - 1 >= 0 && str[len - 1] === ch) { + --len; + } + return str.substr(0, len); +} +function bufferToBase64Url(buffer) { + if (!buffer) { + return undefined; + } + if (!(buffer instanceof Uint8Array)) { + throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); + } + // Uint8Array to Base64. + const str = base64.encodeByteArray(buffer); + // Base64 to Base64Url. + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); +} +function base64UrlToByteArray(str) { + if (!str) { + return undefined; + } + if (str && typeof str.valueOf() !== "string") { + throw new Error("Please provide an input of type string for converting to Uint8Array"); + } + // Base64Url to Base64. + str = str.replace(/-/g, "+").replace(/_/g, "/"); + // Base64 to Uint8Array. 
+ return base64.decodeString(str); +} +function splitSerializeName(prop) { + const classes = []; + let partialclass = ""; + if (prop) { + const subwords = prop.split("."); + for (const item of subwords) { + if (item.charAt(item.length - 1) === "\\") { + partialclass += item.substr(0, item.length - 1) + "."; + } + else { + partialclass += item; + classes.push(partialclass); + partialclass = ""; + } + } + } + return classes; +} +function dateToUnixTime(d) { + if (!d) { + return undefined; + } + if (typeof d.valueOf() === "string") { + d = new Date(d); + } + return Math.floor(d.getTime() / 1000); +} +function unixTimeToDate(n) { + if (!n) { + return undefined; + } + return new Date(n * 1000); +} +function serializeBasicTypes(typeName, objectName, value) { + if (value !== null && value !== undefined) { + if (typeName.match(/^Number$/i) !== null) { + if (typeof value !== "number") { + throw new Error(`${objectName} with value ${value} must be of type number.`); + } + } + else if (typeName.match(/^String$/i) !== null) { + if (typeof value.valueOf() !== "string") { + throw new Error(`${objectName} with value "${value}" must be of type string.`); + } + } + else if (typeName.match(/^Uuid$/i) !== null) { + if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { + throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); + } + } + else if (typeName.match(/^Boolean$/i) !== null) { + if (typeof value !== "boolean") { + throw new Error(`${objectName} with value ${value} must be of type boolean.`); + } + } + else if (typeName.match(/^Stream$/i) !== null) { + const objectType = typeof value; + if (objectType !== "string" && + typeof value.pipe !== "function" && // NodeJS.ReadableStream + typeof value.tee !== "function" && // browser ReadableStream + !(value instanceof ArrayBuffer) && + !ArrayBuffer.isView(value) && + // File objects count as a type of Blob, so we want to use instanceof explicitly + !((typeof Blob === "function" || 
typeof Blob === "object") && value instanceof Blob) && + objectType !== "function") { + throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); + } + } + } + return value; +} +function serializeEnumType(objectName, allowedValues, value) { + if (!allowedValues) { + throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); + } + const isPresent = allowedValues.some((item) => { + if (typeof item.valueOf() === "string") { + return item.toLowerCase() === value.toLowerCase(); + } + return item === value; + }); + if (!isPresent) { + throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); + } + return value; +} +function serializeByteArrayType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = base64.encodeByteArray(value); + } + return value; +} +function serializeBase64UrlType(objectName, value) { + if (value !== undefined && value !== null) { + if (!(value instanceof Uint8Array)) { + throw new Error(`${objectName} must be of type Uint8Array.`); + } + value = bufferToBase64Url(value); + } + return value; +} +function serializeDateTypes(typeName, value, objectName) { + if (value !== undefined && value !== null) { + if (typeName.match(/^Date$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = + value instanceof Date + ? 
value.toISOString().substring(0, 10) + : new Date(value).toISOString().substring(0, 10); + } + else if (typeName.match(/^DateTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); + } + value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); + } + else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); + } + value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); + } + else if (typeName.match(/^UnixTime$/i) !== null) { + if (!(value instanceof Date || + (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { + throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + + `for it to be serialized in UnixTime/Epoch format.`); + } + value = dateToUnixTime(value); + } + else if (typeName.match(/^TimeSpan$/i) !== null) { + if (!isDuration(value)) { + throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); + } + } + } + return value; +} +function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { + var _a; + if (!Array.isArray(object)) { + throw new Error(`${objectName} must be of type Array.`); + } + let elementType = mapper.type.element; + if (!elementType || typeof elementType !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. 
+ if (elementType.type.name === "Composite" && elementType.type.className) { + elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; + } + const tempArray = []; + for (let i = 0; i < object.length; i++) { + const serializedValue = serializer.serialize(elementType, object[i], objectName, options); + if (isXml && elementType.xmlNamespace) { + const xmlnsKey = elementType.xmlNamespacePrefix + ? `xmlns:${elementType.xmlNamespacePrefix}` + : "xmlns"; + if (elementType.type.name === "Composite") { + tempArray[i] = Object.assign({}, serializedValue); + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + else { + tempArray[i] = {}; + tempArray[i][options.xml.xmlCharKey] = serializedValue; + tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; + } + } + else { + tempArray[i] = serializedValue; + } + } + return tempArray; +} +function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { + if (typeof object !== "object") { + throw new Error(`${objectName} must be of type object.`); + } + const valueType = mapper.type.value; + if (!valueType || typeof valueType !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}.`); + } + const tempDictionary = {}; + for (const key of Object.keys(object)) { + const serializedValue = serializer.serialize(valueType, object[key], objectName, options); + // If the element needs an XML namespace we need to add it within the $ property + tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); + } + // Add the namespace to the root element if needed + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix ? 
`xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; + const result = tempDictionary; + result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; + return result; + } + return tempDictionary; +} +/** + * Resolves the additionalProperties property from a referenced mapper + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveAdditionalProperties(serializer, mapper, objectName) { + const additionalProperties = mapper.type.additionalProperties; + if (!additionalProperties && mapper.type.className) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; + } + return additionalProperties; +} +/** + * Finds the mapper referenced by className + * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + * @param objectName - name of the object being serialized + */ +function resolveReferencedMapper(serializer, mapper, objectName) { + const className = mapper.type.className; + if (!className) { + throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); + } + return serializer.modelMappers[className]; +} +/** + * Resolves a composite mapper's modelProperties. 
+ * @param serializer - the serializer containing the entire set of mappers + * @param mapper - the composite mapper to resolve + */ +function resolveModelProperties(serializer, mapper, objectName) { + let modelProps = mapper.type.modelProperties; + if (!modelProps) { + const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); + if (!modelMapper) { + throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); + } + modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; + if (!modelProps) { + throw new Error(`modelProperties cannot be null or undefined in the ` + + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); + } + } + return modelProps; +} +function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); + } + if (object !== undefined && object !== null) { + const payload = {}; + const modelProps = resolveModelProperties(serializer, mapper, objectName); + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + if (propertyMapper.readOnly) { + continue; + } + let propName; + let parentObject = payload; + if (serializer.isXML) { + if (propertyMapper.xmlIsWrapped) { + propName = propertyMapper.xmlName; + } + else { + propName = propertyMapper.xmlElementName || propertyMapper.xmlName; + } + } + else { + const paths = splitSerializeName(propertyMapper.serializedName); + propName = paths.pop(); + for (const pathName of paths) { + const childObject = parentObject[pathName]; + if ((childObject === undefined || childObject === null) && + ((object[key] !== undefined && object[key] !== null) || + propertyMapper.defaultValue !== undefined)) { + parentObject[pathName] = {}; + } + parentObject = parentObject[pathName]; 
+ } + } + if (parentObject !== undefined && parentObject !== null) { + if (isXml && mapper.xmlNamespace) { + const xmlnsKey = mapper.xmlNamespacePrefix + ? `xmlns:${mapper.xmlNamespacePrefix}` + : "xmlns"; + parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); + } + const propertyObjectName = propertyMapper.serializedName !== "" + ? objectName + "." + propertyMapper.serializedName + : objectName; + let toSerialize = object[key]; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator && + polymorphicDiscriminator.clientName === key && + (toSerialize === undefined || toSerialize === null)) { + toSerialize = mapper.serializedName; + } + const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); + if (serializedValue !== undefined && propName !== undefined && propName !== null) { + const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); + if (isXml && propertyMapper.xmlIsAttribute) { + // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. + // This keeps things simple while preventing name collision + // with names in user documents. 
+ parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; + parentObject[XML_ATTRKEY][propName] = serializedValue; + } + else if (isXml && propertyMapper.xmlIsWrapped) { + parentObject[propName] = { [propertyMapper.xmlElementName]: value }; + } + else { + parentObject[propName] = value; + } + } + } + } + const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); + if (additionalPropertiesMapper) { + const propNames = Object.keys(modelProps); + for (const clientPropName in object) { + const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); + if (isAdditionalProperty) { + payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); + } + } + } + return payload; + } + return object; +} +function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { + if (!isXml || !propertyMapper.xmlNamespace) { + return serializedValue; + } + const xmlnsKey = propertyMapper.xmlNamespacePrefix + ? `xmlns:${propertyMapper.xmlNamespacePrefix}` + : "xmlns"; + const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; + if (["Composite"].includes(propertyMapper.type.name)) { + if (serializedValue[XML_ATTRKEY]) { + return serializedValue; + } + else { + const result = Object.assign({}, serializedValue); + result[XML_ATTRKEY] = xmlNamespace; + return result; + } + } + const result = {}; + result[options.xml.xmlCharKey] = serializedValue; + result[XML_ATTRKEY] = xmlNamespace; + return result; +} +function isSpecialXmlProperty(propertyName, options) { + return [XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); +} +function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { + var _a, _b; + const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; + if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { + mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); + } + const modelProps = resolveModelProperties(serializer, mapper, objectName); + let instance = {}; + const handledPropertyNames = []; + for (const key of Object.keys(modelProps)) { + const propertyMapper = modelProps[key]; + const paths = splitSerializeName(modelProps[key].serializedName); + handledPropertyNames.push(paths[0]); + const { serializedName, xmlName, xmlElementName } = propertyMapper; + let propertyObjectName = objectName; + if (serializedName !== "" && serializedName !== undefined) { + propertyObjectName = objectName + "." + serializedName; + } + const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; + if (headerCollectionPrefix) { + const dictionary = {}; + for (const headerKey of Object.keys(responseBody)) { + if (headerKey.startsWith(headerCollectionPrefix)) { + dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); + } + handledPropertyNames.push(headerKey); + } + instance[key] = dictionary; + } + else if (serializer.isXML) { + if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { + instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); + } + else if (propertyMapper.xmlIsMsText) { + if (responseBody[xmlCharKey] !== undefined) { + instance[key] = responseBody[xmlCharKey]; + } + else if (typeof responseBody === "string") { + // The special case where xml parser parses "content" into JSON of + // `{ name: "content"}` instead of `{ name: { "_": "content" }}` + instance[key] = responseBody; + } + } + else { + const propertyName = xmlElementName || xmlName || serializedName; + if (propertyMapper.xmlIsWrapped) { + /* a list of wrapped by + For the xml example below + + ... + ... 
+ + the responseBody has + { + Cors: { + CorsRule: [{...}, {...}] + } + } + xmlName is "Cors" and xmlElementName is"CorsRule". + */ + const wrapped = responseBody[xmlName]; + const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; + instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); + handledPropertyNames.push(xmlName); + } + else { + const property = responseBody[propertyName]; + instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); + handledPropertyNames.push(propertyName); + } + } + } + else { + // deserialize the property if it is present in the provided responseBody instance + let propertyInstance; + let res = responseBody; + // traversing the object step by step. + let steps = 0; + for (const item of paths) { + if (!res) + break; + steps++; + res = res[item]; + } + // only accept null when reaching the last position of object otherwise it would be undefined + if (res === null && steps < paths.length) { + res = undefined; + } + propertyInstance = res; + const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; + // checking that the model property name (key)(ex: "fishtype") and the + // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") + // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") + // is a better approach. The generator is not consistent with escaping '\.' in the + // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator + // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, + // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and + // the transformation of model property name (ex: "fishtype") is done consistently. + // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
+ if (polymorphicDiscriminator && + key === polymorphicDiscriminator.clientName && + (propertyInstance === undefined || propertyInstance === null)) { + propertyInstance = mapper.serializedName; + } + let serializedValue; + // paging + if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { + propertyInstance = responseBody[key]; + const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + // Copy over any properties that have already been added into the instance, where they do + // not exist on the newly de-serialized array + for (const [k, v] of Object.entries(instance)) { + if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { + arrayInstance[k] = v; + } + } + instance = arrayInstance; + } + else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { + serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); + instance[key] = serializedValue; + } + } + } + const additionalPropertiesMapper = mapper.type.additionalProperties; + if (additionalPropertiesMapper) { + const isAdditionalProperty = (responsePropName) => { + for (const clientPropName in modelProps) { + const paths = splitSerializeName(modelProps[clientPropName].serializedName); + if (paths[0] === responsePropName) { + return false; + } + } + return true; + }; + for (const responsePropName in responseBody) { + if (isAdditionalProperty(responsePropName)) { + instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); + } + } + } + else if (responseBody && !options.ignoreUnknownProperties) { + for (const key of Object.keys(responseBody)) { + if (instance[key] === undefined && + !handledPropertyNames.includes(key) && + !isSpecialXmlProperty(key, options)) { + instance[key] = responseBody[key]; + } + } + } + return instance; +} +function 
deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { + /* jshint validthis: true */ + const value = mapper.type.value; + if (!value || typeof value !== "object") { + throw new Error(`"value" metadata for a Dictionary must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + const tempDictionary = {}; + for (const key of Object.keys(responseBody)) { + tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); + } + return tempDictionary; + } + return responseBody; +} +function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { + var _a; + let element = mapper.type.element; + if (!element || typeof element !== "object") { + throw new Error(`element" metadata for an Array must be defined in the ` + + `mapper and it must of type "object" in ${objectName}`); + } + if (responseBody) { + if (!Array.isArray(responseBody)) { + // xml2js will interpret a single element array as just the element, so force it to be an array + responseBody = [responseBody]; + } + // Quirk: Composite mappers referenced by `element` might + // not have *all* properties declared (like uberParent), + // so let's try to look up the full definition by name. + if (element.type.name === "Composite" && element.type.className) { + element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? _a : element; + } + const tempArray = []; + for (let i = 0; i < responseBody.length; i++) { + tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); + } + return tempArray; + } + return responseBody; +} +function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { + const typeNamesToCheck = [typeName]; + while (typeNamesToCheck.length) { + const currentName = typeNamesToCheck.shift(); + const indexDiscriminator = discriminatorValue === currentName + ? 
discriminatorValue + : currentName + "." + discriminatorValue; + if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { + return discriminators[indexDiscriminator]; + } + else { + for (const [name, mapper] of Object.entries(discriminators)) { + if (name.startsWith(currentName + ".") && + mapper.type.uberParent === currentName && + mapper.type.className) { + typeNamesToCheck.push(mapper.type.className); + } + } + } + } + return undefined; +} +function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { + var _a; + const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); + if (polymorphicDiscriminator) { + let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; + if (discriminatorName) { + // The serializedName might have \\, which we just want to ignore + if (polymorphicPropertyName === "serializedName") { + discriminatorName = discriminatorName.replace(/\\/gi, ""); + } + const discriminatorValue = object[discriminatorName]; + const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? 
_a : mapper.type.className; + if (typeof discriminatorValue === "string" && typeName) { + const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); + if (polymorphicMapper) { + mapper = polymorphicMapper; + } + } + } + } + return mapper; +} +function getPolymorphicDiscriminatorRecursively(serializer, mapper) { + return (mapper.type.polymorphicDiscriminator || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || + getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); +} +function getPolymorphicDiscriminatorSafely(serializer, typeName) { + return (typeName && + serializer.modelMappers[typeName] && + serializer.modelMappers[typeName].type.polymorphicDiscriminator); +} +/** + * Known types of Mappers + */ +export const MapperTypeNames = { + Base64Url: "Base64Url", + Boolean: "Boolean", + ByteArray: "ByteArray", + Composite: "Composite", + Date: "Date", + DateTime: "DateTime", + DateTimeRfc1123: "DateTimeRfc1123", + Dictionary: "Dictionary", + Enum: "Enum", + Number: "Number", + Object: "Object", + Sequence: "Sequence", + String: "String", + Stream: "Stream", + TimeSpan: "TimeSpan", + UnixTime: "UnixTime", +}; +//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/serviceClient.js b/node_modules/@azure/core-client/dist/react-native/serviceClient.js new file mode 100644 index 000000000..eccd0d8cf --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/serviceClient.js @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createPipelineRequest, } from "@azure/core-rest-pipeline"; +import { createClientPipeline } from "./pipeline.js"; +import { flattenResponse } from "./utils.js"; +import { getCachedDefaultHttpClient } from "./httpClientCache.js"; +import { getOperationRequestInfo } from "./operationHelpers.js"; +import { getRequestUrl } from "./urlHelpers.js"; +import { getStreamingResponseStatusCodes } from "./interfaceHelpers.js"; +import { logger } from "./log.js"; +/** + * Initializes a new instance of the ServiceClient. + */ +export class ServiceClient { + /** + * The ServiceClient constructor + * @param credential - The credentials used for authentication with the service. + * @param options - The service client options that govern the behavior of the client. + */ + constructor(options = {}) { + var _a, _b; + this._requestContentType = options.requestContentType; + this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; + if (options.baseUri) { + logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); + } + this._allowInsecureConnection = options.allowInsecureConnection; + this._httpClient = options.httpClient || getCachedDefaultHttpClient(); + this.pipeline = options.pipeline || createDefaultPipeline(options); + if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { + for (const { policy, position } of options.additionalPolicies) { + // Sign happens after Retry and is commonly needed to occur + // before policies that intercept post-retry. + const afterPhase = position === "perRetry" ? "Sign" : undefined; + this.pipeline.addPolicy(policy, { + afterPhase, + }); + } + } + } + /** + * Send the provided httpRequest. + */ + async sendRequest(request) { + return this.pipeline.sendRequest(this._httpClient, request); + } + /** + * Send an HTTP request that is populated using the provided OperationSpec. 
+ * @typeParam T - The typed result of the request, based on the OperationSpec. + * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. + * @param operationSpec - The OperationSpec to use to populate the httpRequest. + */ + async sendOperationRequest(operationArguments, operationSpec) { + const endpoint = operationSpec.baseUrl || this._endpoint; + if (!endpoint) { + throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); + } + // Templatized URLs sometimes reference properties on the ServiceClient child class, + // so we have to pass `this` below in order to search these properties if they're + // not part of OperationArguments + const url = getRequestUrl(endpoint, operationSpec, operationArguments, this); + const request = createPipelineRequest({ + url, + }); + request.method = operationSpec.httpMethod; + const operationInfo = getOperationRequestInfo(request); + operationInfo.operationSpec = operationSpec; + operationInfo.operationArguments = operationArguments; + const contentType = operationSpec.contentType || this._requestContentType; + if (contentType && operationSpec.requestBody) { + request.headers.set("Content-Type", contentType); + } + const options = operationArguments.options; + if (options) { + const requestOptions = options.requestOptions; + if (requestOptions) { + if (requestOptions.timeout) { + request.timeout = requestOptions.timeout; + } + if (requestOptions.onUploadProgress) { + request.onUploadProgress = requestOptions.onUploadProgress; + } + if (requestOptions.onDownloadProgress) { + request.onDownloadProgress = requestOptions.onDownloadProgress; + } + if (requestOptions.shouldDeserialize !== undefined) { + operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; + } + if (requestOptions.allowInsecureConnection) { + request.allowInsecureConnection = true; + } + } + if 
(options.abortSignal) { + request.abortSignal = options.abortSignal; + } + if (options.tracingOptions) { + request.tracingOptions = options.tracingOptions; + } + } + if (this._allowInsecureConnection) { + request.allowInsecureConnection = true; + } + if (request.streamResponseStatusCodes === undefined) { + request.streamResponseStatusCodes = getStreamingResponseStatusCodes(operationSpec); + } + try { + const rawResponse = await this.sendRequest(request); + const flatResponse = flattenResponse(rawResponse, operationSpec.responses[rawResponse.status]); + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse); + } + return flatResponse; + } + catch (error) { + if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { + const rawResponse = error.response; + const flatResponse = flattenResponse(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); + error.details = flatResponse; + if (options === null || options === void 0 ? void 0 : options.onResponse) { + options.onResponse(rawResponse, flatResponse, error); + } + } + throw error; + } + } +} +function createDefaultPipeline(options) { + const credentialScopes = getCredentialScopes(options); + const credentialOptions = options.credential && credentialScopes + ? { credentialScopes, credential: options.credential } + : undefined; + return createClientPipeline(Object.assign(Object.assign({}, options), { credentialOptions })); +} +function getCredentialScopes(options) { + if (options.credentialScopes) { + return options.credentialScopes; + } + if (options.endpoint) { + return `${options.endpoint}/.default`; + } + if (options.baseUri) { + return `${options.baseUri}/.default`; + } + if (options.credential && !options.credentialScopes) { + throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. 
Unable to create a bearerTokenAuthenticationPolicy`); + } + return undefined; +} +//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/state.js b/node_modules/@azure/core-client/dist/react-native/state.js new file mode 100644 index 000000000..1699f50a9 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/state.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// @ts-expect-error The recommended approach to sharing module state between ESM and CJS. +// See https://github.com/isaacs/tshy/blob/main/README.md#module-local-state for additional information. +import { state as cjsState } from "../commonjs/state.js"; +/** + * Defines the shared state between CJS and ESM by re-exporting the CJS state. + */ +export const state = cjsState; +//# sourceMappingURL=state.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/urlHelpers.js b/node_modules/@azure/core-client/dist/react-native/urlHelpers.js new file mode 100644 index 000000000..782f4ef71 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/urlHelpers.js @@ -0,0 +1,235 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { getOperationArgumentValueFromParameter } from "./operationHelpers.js"; +import { getPathStringFromParameter } from "./interfaceHelpers.js"; +const CollectionFormatToDelimiterMap = { + CSV: ",", + SSV: " ", + Multi: "Multi", + TSV: "\t", + Pipes: "|", +}; +export function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { + const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); + let isAbsolutePath = false; + let requestUrl = replaceAll(baseUri, urlReplacements); + if (operationSpec.path) { + let path = replaceAll(operationSpec.path, urlReplacements); + // QUIRK: sometimes we get a path component like /{nextLink} + // which may be a fully formed URL with a leading /. In that case, we should + // remove the leading / + if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { + path = path.substring(1); + } + // QUIRK: sometimes we get a path component like {nextLink} + // which may be a fully formed URL. In that case, we should + // ignore the baseUri. + if (isAbsoluteUrl(path)) { + requestUrl = path; + isAbsolutePath = true; + } + else { + requestUrl = appendPath(requestUrl, path); + } + } + const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); + /** + * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` + * is an absolute path. This ensures that existing query parameter values in `requestUrl` + * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it + * is still being built so there is nothing to overwrite. 
+ */ + requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); + return requestUrl; +} +function replaceAll(input, replacements) { + let result = input; + for (const [searchValue, replaceValue] of replacements) { + result = result.split(searchValue).join(replaceValue); + } + return result; +} +function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const urlParameter of operationSpec.urlParameters) { + let urlParameterValue = getOperationArgumentValueFromParameter(operationArguments, urlParameter, fallbackObject); + const parameterPathString = getPathStringFromParameter(urlParameter); + urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); + if (!urlParameter.skipEncoding) { + urlParameterValue = encodeURIComponent(urlParameterValue); + } + result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); + } + } + return result; +} +function isAbsoluteUrl(url) { + return url.includes("://"); +} +function appendPath(url, pathToAppend) { + if (!pathToAppend) { + return url; + } + const parsedUrl = new URL(url); + let newPath = parsedUrl.pathname; + if (!newPath.endsWith("/")) { + newPath = `${newPath}/`; + } + if (pathToAppend.startsWith("/")) { + pathToAppend = pathToAppend.substring(1); + } + const searchStart = pathToAppend.indexOf("?"); + if (searchStart !== -1) { + const path = pathToAppend.substring(0, searchStart); + const search = pathToAppend.substring(searchStart + 1); + newPath = newPath + path; + if (search) { + parsedUrl.search = parsedUrl.search ? 
`${parsedUrl.search}&${search}` : search; + } + } + else { + newPath = newPath + pathToAppend; + } + parsedUrl.pathname = newPath; + return parsedUrl.toString(); +} +function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { + var _a; + const result = new Map(); + const sequenceParams = new Set(); + if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? void 0 : _a.length) { + for (const queryParameter of operationSpec.queryParameters) { + if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { + sequenceParams.add(queryParameter.mapper.serializedName); + } + let queryParameterValue = getOperationArgumentValueFromParameter(operationArguments, queryParameter, fallbackObject); + if ((queryParameterValue !== undefined && queryParameterValue !== null) || + queryParameter.mapper.required) { + queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter)); + const delimiter = queryParameter.collectionFormat + ? 
CollectionFormatToDelimiterMap[queryParameter.collectionFormat] + : ""; + if (Array.isArray(queryParameterValue)) { + // replace null and undefined + queryParameterValue = queryParameterValue.map((item) => { + if (item === null || item === undefined) { + return ""; + } + return item; + }); + } + if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { + continue; + } + else if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + if (!queryParameter.skipEncoding) { + if (Array.isArray(queryParameterValue)) { + queryParameterValue = queryParameterValue.map((item) => { + return encodeURIComponent(item); + }); + } + else { + queryParameterValue = encodeURIComponent(queryParameterValue); + } + } + // Join pipes and CSV *after* encoding, or the server will be upset. + if (Array.isArray(queryParameterValue) && + (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { + queryParameterValue = queryParameterValue.join(delimiter); + } + result.set(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); + } + } + } + return { + queryParams: result, + sequenceParams, + }; +} +function simpleParseQueryParams(queryString) { + const result = new Map(); + if (!queryString || queryString[0] !== "?") { + return result; + } + // remove the leading ? 
+ queryString = queryString.slice(1); + const pairs = queryString.split("&"); + for (const pair of pairs) { + const [name, value] = pair.split("=", 2); + const existingValue = result.get(name); + if (existingValue) { + if (Array.isArray(existingValue)) { + existingValue.push(value); + } + else { + result.set(name, [existingValue, value]); + } + } + else { + result.set(name, value); + } + } + return result; +} +/** @internal */ +export function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { + if (queryParams.size === 0) { + return url; + } + const parsedUrl = new URL(url); + // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which + // can change their meaning to the server, such as in the case of a SAS signature. + // To avoid accidentally un-encoding a query param, we parse the key/values ourselves + const combinedParams = simpleParseQueryParams(parsedUrl.search); + for (const [name, value] of queryParams) { + const existingValue = combinedParams.get(name); + if (Array.isArray(existingValue)) { + if (Array.isArray(value)) { + existingValue.push(...value); + const valueSet = new Set(existingValue); + combinedParams.set(name, Array.from(valueSet)); + } + else { + existingValue.push(value); + } + } + else if (existingValue) { + if (Array.isArray(value)) { + value.unshift(existingValue); + } + else if (sequenceParams.has(name)) { + combinedParams.set(name, [existingValue, value]); + } + if (!noOverwrite) { + combinedParams.set(name, value); + } + } + else { + combinedParams.set(name, value); + } + } + const searchPieces = []; + for (const [name, value] of combinedParams) { + if (typeof value === "string") { + searchPieces.push(`${name}=${value}`); + } + else if (Array.isArray(value)) { + // QUIRK: If we get an array of values, include multiple key/value pairs + for (const subValue of value) { + searchPieces.push(`${name}=${subValue}`); + } + } + else { + searchPieces.push(`${name}=${value}`); + } + } + // QUIRK: we 
have to set search manually as searchParams will encode comma when it shouldn't. + parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return parsedUrl.toString(); +} +//# sourceMappingURL=urlHelpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/dist/react-native/utils.js b/node_modules/@azure/core-client/dist/react-native/utils.js new file mode 100644 index 000000000..7a6268631 --- /dev/null +++ b/node_modules/@azure/core-client/dist/react-native/utils.js @@ -0,0 +1,115 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A type guard for a primitive response body. + * @param value - Value to test + * + * @internal + */ +export function isPrimitiveBody(value, mapperTypeName) { + return (mapperTypeName !== "Composite" && + mapperTypeName !== "Dictionary" && + (typeof value === "string" || + typeof value === "number" || + typeof value === "boolean" || + (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== + null || + value === undefined || + value === null)); +} +const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; +/** + * Returns true if the given string is in ISO 8601 format. + * @param value - The value to be validated for ISO 8601 duration format. + * @internal + */ +export function isDuration(value) { + return validateISODuration.test(value); +} +const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; +/** + * Returns true if the provided uuid is valid. + * + * @param uuid - The uuid that needs to be validated. 
+ * + * @internal + */ +export function isValidUuid(uuid) { + return validUuidRegex.test(uuid); +} +/** + * Maps the response as follows: + * - wraps the response body if needed (typically if its type is primitive). + * - returns null if the combination of the headers and the body is empty. + * - otherwise, returns the combination of the headers and the body. + * + * @param responseObject - a representation of the parsed response + * @returns the response that will be returned to the user which can be null and/or wrapped + * + * @internal + */ +function handleNullableResponseAndWrappableBody(responseObject) { + const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); + if (responseObject.hasNullableType && + Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { + return responseObject.shouldWrapBody ? { body: null } : null; + } + else { + return responseObject.shouldWrapBody + ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; + } +} +/** + * Take a `FullOperationResponse` and turn it into a flat + * response object to hand back to the consumer. + * @param fullResponse - The processed response from the operation request + * @param responseSpec - The response map from the OperationSpec + * + * @internal + */ +export function flattenResponse(fullResponse, responseSpec) { + var _a, _b; + const parsedHeaders = fullResponse.parsedHeaders; + // head methods never have a body, but we return a boolean set to body property + // to indicate presence/absence of the resource + if (fullResponse.request.method === "HEAD") { + return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); + } + const bodyMapper = responseSpec && responseSpec.bodyMapper; + const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? 
void 0 : bodyMapper.nullable); + const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.type.name; + /** If the body is asked for, we look at the expected body type to handle it */ + if (expectedBodyTypeName === "Stream") { + return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); + } + const modelProperties = (expectedBodyTypeName === "Composite" && + bodyMapper.type.modelProperties) || + {}; + const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); + if (expectedBodyTypeName === "Sequence" || isPageableResponse) { + const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; + for (const key of Object.keys(modelProperties)) { + if (modelProperties[key].serializedName) { + arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; + } + } + if (parsedHeaders) { + for (const key of Object.keys(parsedHeaders)) { + arrayResponse[key] = parsedHeaders[key]; + } + } + return isNullable && + !fullResponse.parsedBody && + !parsedHeaders && + Object.getOwnPropertyNames(modelProperties).length === 0 + ? 
null + : arrayResponse; + } + return handleNullableResponseAndWrappableBody({ + body: fullResponse.parsedBody, + headers: parsedHeaders, + hasNullableType: isNullable, + shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), + }); +} +//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-client/package.json b/node_modules/@azure/core-client/package.json new file mode 100644 index 000000000..23de5e6fd --- /dev/null +++ b/node_modules/@azure/core-client/package.json @@ -0,0 +1,117 @@ +{ + "name": "@azure/core-client", + "version": "1.9.2", + "description": "Core library for interfacing with AutoRest generated code", + "sdk-type": "client", + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=18.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-client/", + "sideEffects": false, + "prettier": "@azure/eslint-plugin-azure-sdk/prettier.json", + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor 
run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-rest-pipeline": "^1.9.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.6.1", + "@azure/logger": "^1.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/core-xml": "^1.3.4", + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + 
"@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.13.0", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-http-compat/dist/browser/extendedClient.js b/node_modules/@azure/core-http-compat/dist/browser/extendedClient.js new file mode 100644 index 000000000..7995e4e9c --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/extendedClient.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createDisableKeepAlivePolicy, pipelineContainsDisableKeepAlivePolicy, } from "./policies/disableKeepAlivePolicy.js"; +import { redirectPolicyName } from "@azure/core-rest-pipeline"; +import { ServiceClient, } from "@azure/core-client"; +import { toCompatResponse } from "./response.js"; +/** + * Client to provide compatability between core V1 & V2. + */ +export class ExtendedServiceClient extends ServiceClient { + constructor(options) { + var _a, _b; + super(options); + if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && + !pipelineContainsDisableKeepAlivePolicy(this.pipeline)) { + this.pipeline.addPolicy(createDisableKeepAlivePolicy()); + } + if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { + this.pipeline.removePolicy({ + name: redirectPolicyName, + }); + } + } + /** + * Compatible send operation request function. 
+ * + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns + */ + async sendOperationRequest(operationArguments, operationSpec) { + var _a; + const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error); + } + } + operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: toCompatResponse(lastResponse), + }); + } + return result; + } +} +//# sourceMappingURL=extendedClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/httpClientAdapter.js b/node_modules/@azure/core-http-compat/dist/browser/httpClientAdapter.js new file mode 100644 index 000000000..65ea2a830 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/httpClientAdapter.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { toPipelineResponse } from "./response.js"; +import { toWebResourceLike } from "./util.js"; +/** + * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. 
+ * @param requestPolicyClient - A HttpClient compatible with core-http + * @returns A HttpClient compatible with core-rest-pipeline + */ +export function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request) => { + const response = await requestPolicyClient.sendRequest(toWebResourceLike(request, { createProxy: true })); + return toPipelineResponse(response); + }, + }; +} +//# sourceMappingURL=httpClientAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/index.js b/node_modules/@azure/core-http-compat/dist/browser/index.js new file mode 100644 index 000000000..697417600 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/index.js @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A Shim Library that provides compatibility between Core V1 & V2 Packages. + * + * @packageDocumentation + */ +export { ExtendedServiceClient, } from "./extendedClient.js"; +export { requestPolicyFactoryPolicyName, createRequestPolicyFactoryPolicy, HttpPipelineLogLevel, } from "./policies/requestPolicyFactoryPolicy.js"; +export { disableKeepAlivePolicyName } from "./policies/disableKeepAlivePolicy.js"; +export { convertHttpClient } from "./httpClientAdapter.js"; +export { toHttpHeadersLike, } from "./util.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/package.json b/node_modules/@azure/core-http-compat/dist/browser/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-http-compat/dist/browser/policies/disableKeepAlivePolicy.js b/node_modules/@azure/core-http-compat/dist/browser/policies/disableKeepAlivePolicy.js new file mode 100644 index 000000000..9c884add1 --- /dev/null +++ 
b/node_modules/@azure/core-http-compat/dist/browser/policies/disableKeepAlivePolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; +export function createDisableKeepAlivePolicy() { + return { + name: disableKeepAlivePolicyName, + async sendRequest(request, next) { + request.disableKeepAlive = true; + return next(request); + }, + }; +} +/** + * @internal + */ +export function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === disableKeepAlivePolicyName); +} +//# sourceMappingURL=disableKeepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/policies/keepAliveOptions.js b/node_modules/@azure/core-http-compat/dist/browser/policies/keepAliveOptions.js new file mode 100644 index 000000000..fa48fc505 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/policies/keepAliveOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=keepAliveOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/policies/redirectOptions.js b/node_modules/@azure/core-http-compat/dist/browser/policies/redirectOptions.js new file mode 100644 index 000000000..a532ff41c --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/policies/redirectOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=redirectOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/policies/requestPolicyFactoryPolicy.js b/node_modules/@azure/core-http-compat/dist/browser/policies/requestPolicyFactoryPolicy.js new file mode 100644 index 000000000..e016492d5 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/policies/requestPolicyFactoryPolicy.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { toPipelineRequest, toWebResourceLike } from "../util.js"; +import { toCompatResponse, toPipelineResponse } from "../response.js"; +/** + * An enum for compatibility with RequestPolicy + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, +}; +/** + * The name of the RequestPolicyFactoryPolicy + */ +export const requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; +/** + * A policy that wraps policies written for core-http. 
+ * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline + */ +export function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: requestPolicyFactoryPolicyName, + async sendRequest(request, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response = await next(toPipelineRequest(httpRequest)); + return toCompatResponse(response, { createProxy: true }); + }, + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = toWebResourceLike(request, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return toPipelineResponse(response); + }, + }; +} +//# sourceMappingURL=requestPolicyFactoryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/response.js b/node_modules/@azure/core-http-compat/dist/browser/response.js new file mode 100644 index 000000000..7fe7d46ea --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/response.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders } from "@azure/core-rest-pipeline"; +import { toHttpHeadersLike, toPipelineRequest, toWebResourceLike, } from "./util.js"; +const originalResponse = Symbol("Original FullOperationResponse"); +/** + * A helper to convert response objects from the new pipeline back to the old one. + * @param response - A response object from core-client. + * @returns A response compatible with `HttpOperationResponse` from core-http. + */ +export function toCompatResponse(response, options) { + let request = toWebResourceLike(response.request); + let headers = toHttpHeadersLike(response.headers); + if (options === null || options === void 0 ? 
void 0 : options.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } + else if (prop === "request") { + return request; + } + else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } + else if (prop === "request") { + request = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return Object.assign(Object.assign({}, response), { request, + headers }); + } +} +/** + * A helper to convert back to a PipelineResponse + * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. + */ +export function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = createHttpHeaders(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } + else { + return Object.assign(Object.assign({}, compatResponse), { headers, request: toPipelineRequest(compatResponse.request) }); + } +} +//# sourceMappingURL=response.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/browser/util.js b/node_modules/@azure/core-http-compat/dist/browser/util.js new file mode 100644 index 000000000..e47fa99e8 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/browser/util.js @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders, createPipelineRequest, } from "@azure/core-rest-pipeline"; +// We use a custom symbol to cache a reference to the original request without +// exposing it on the public interface. 
+const originalRequestSymbol = Symbol("Original PipelineRequest"); +// Symbol.for() will return the same symbol if it's already been created +// This particular one is used in core-client to handle the case of when a request is +// cloned but we need to retrieve the OperationSpec and OperationArguments from the +// original request. +const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); +export function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request = compatWebResource[originalRequestSymbol]; + const headers = createHttpHeaders(webResource.headers.toJson({ preserveCase: true })); + if (request) { + request.headers = headers; + return request; + } + else { + const newRequest = createPipelineRequest({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = + options.originalRequest; + } + return newRequest; + } +} +export function toWebResourceLike(request, options) { + var _a; + const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? 
_a : request; + const webResource = { + url: request.url, + method: request.method, + headers: toHttpHeadersLike(request.headers), + withCredentials: request.withCredentials, + timeout: request.timeout, + requestId: request.headers.get("x-ms-client-request-id") || request.requestId, + abortSignal: request.abortSignal, + body: request.body, + formData: request.formData, + keepAlive: !!request.disableKeepAlive, + onDownloadProgress: request.onDownloadProgress, + onUploadProgress: request.onUploadProgress, + proxySettings: request.proxySettings, + streamResponseStatusCodes: request.streamResponseStatusCodes, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + /** do nothing */ + }, + }; + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request; + } + else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest, + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return webResource; + } +} +/** + * Converts HttpHeaders from core-rest-pipeline to look like + * HttpHeaders from core-http. 
+ * @param headers - HttpHeaders from core-rest-pipeline + * @returns HttpHeaders as they looked in core-http + */ +export function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); +} +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +/** + * A collection of HTTP header key/value pairs. + */ +export class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. 
+ */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js b/node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js new file mode 100644 index 000000000..1faaaee26 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/extendedClient.js @@ -0,0 +1,55 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ExtendedServiceClient = void 0; +const disableKeepAlivePolicy_js_1 = require("./policies/disableKeepAlivePolicy.js"); +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +const core_client_1 = require("@azure/core-client"); +const response_js_1 = require("./response.js"); +/** + * Client to provide compatability between core V1 & V2. + */ +class ExtendedServiceClient extends core_client_1.ServiceClient { + constructor(options) { + var _a, _b; + super(options); + if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && + !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) { + this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)()); + } + if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { + this.pipeline.removePolicy({ + name: core_rest_pipeline_1.redirectPolicyName, + }); + } + } + /** + * Compatible send operation request function. 
+ * + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns + */ + async sendOperationRequest(operationArguments, operationSpec) { + var _a; + const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error); + } + } + operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: (0, response_js_1.toCompatResponse)(lastResponse), + }); + } + return result; + } +} +exports.ExtendedServiceClient = ExtendedServiceClient; +//# sourceMappingURL=extendedClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js b/node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js new file mode 100644 index 000000000..466d115d8 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/httpClientAdapter.js @@ -0,0 +1,22 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.convertHttpClient = void 0; +const response_js_1 = require("./response.js"); +const util_js_1 = require("./util.js"); +/** + * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. 
+ * @param requestPolicyClient - A HttpClient compatible with core-http + * @returns A HttpClient compatible with core-rest-pipeline + */ +function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request) => { + const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request, { createProxy: true })); + return (0, response_js_1.toPipelineResponse)(response); + }, + }; +} +exports.convertHttpClient = convertHttpClient; +//# sourceMappingURL=httpClientAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/index.js b/node_modules/@azure/core-http-compat/dist/commonjs/index.js new file mode 100644 index 000000000..89f78043e --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/index.js @@ -0,0 +1,23 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toHttpHeadersLike = exports.convertHttpClient = exports.disableKeepAlivePolicyName = exports.HttpPipelineLogLevel = exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.ExtendedServiceClient = void 0; +/** + * A Shim Library that provides compatibility between Core V1 & V2 Packages. 
+ * + * @packageDocumentation + */ +var extendedClient_js_1 = require("./extendedClient.js"); +Object.defineProperty(exports, "ExtendedServiceClient", { enumerable: true, get: function () { return extendedClient_js_1.ExtendedServiceClient; } }); +var requestPolicyFactoryPolicy_js_1 = require("./policies/requestPolicyFactoryPolicy.js"); +Object.defineProperty(exports, "requestPolicyFactoryPolicyName", { enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; } }); +Object.defineProperty(exports, "createRequestPolicyFactoryPolicy", { enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; } }); +Object.defineProperty(exports, "HttpPipelineLogLevel", { enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; } }); +var disableKeepAlivePolicy_js_1 = require("./policies/disableKeepAlivePolicy.js"); +Object.defineProperty(exports, "disableKeepAlivePolicyName", { enumerable: true, get: function () { return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; } }); +var httpClientAdapter_js_1 = require("./httpClientAdapter.js"); +Object.defineProperty(exports, "convertHttpClient", { enumerable: true, get: function () { return httpClientAdapter_js_1.convertHttpClient; } }); +var util_js_1 = require("./util.js"); +Object.defineProperty(exports, "toHttpHeadersLike", { enumerable: true, get: function () { return util_js_1.toHttpHeadersLike; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/package.json b/node_modules/@azure/core-http-compat/dist/commonjs/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js 
b/node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js new file mode 100644 index 000000000..f7304e412 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/policies/disableKeepAlivePolicy.js @@ -0,0 +1,24 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pipelineContainsDisableKeepAlivePolicy = exports.createDisableKeepAlivePolicy = exports.disableKeepAlivePolicyName = void 0; +exports.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; +function createDisableKeepAlivePolicy() { + return { + name: exports.disableKeepAlivePolicyName, + async sendRequest(request, next) { + request.disableKeepAlive = true; + return next(request); + }, + }; +} +exports.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; +/** + * @internal + */ +function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === exports.disableKeepAlivePolicyName); +} +exports.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; +//# sourceMappingURL=disableKeepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/policies/keepAliveOptions.js b/node_modules/@azure/core-http-compat/dist/commonjs/policies/keepAliveOptions.js new file mode 100644 index 000000000..de84e7f30 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/policies/keepAliveOptions.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=keepAliveOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/policies/redirectOptions.js b/node_modules/@azure/core-http-compat/dist/commonjs/policies/redirectOptions.js new file mode 100644 index 000000000..5fb00fe6b --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/policies/redirectOptions.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=redirectOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js b/node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js new file mode 100644 index 000000000..8fbb81b4a --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/policies/requestPolicyFactoryPolicy.js @@ -0,0 +1,55 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.HttpPipelineLogLevel = void 0; +const util_js_1 = require("../util.js"); +const response_js_1 = require("../response.js"); +/** + * An enum for compatibility with RequestPolicy + */ +var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; +})(HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); +const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, +}; +/** + * The name of the RequestPolicyFactoryPolicy + */ +exports.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; +/** + * A policy that wraps policies written for core-http. 
+ * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline + */ +function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: exports.requestPolicyFactoryPolicyName, + async sendRequest(request, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response = await next((0, util_js_1.toPipelineRequest)(httpRequest)); + return (0, response_js_1.toCompatResponse)(response, { createProxy: true }); + }, + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = (0, util_js_1.toWebResourceLike)(request, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return (0, response_js_1.toPipelineResponse)(response); + }, + }; +} +exports.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; +//# sourceMappingURL=requestPolicyFactoryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/response.js b/node_modules/@azure/core-http-compat/dist/commonjs/response.js new file mode 100644 index 000000000..e49b46730 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/response.js @@ -0,0 +1,65 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.toPipelineResponse = exports.toCompatResponse = void 0; +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +const util_js_1 = require("./util.js"); +const originalResponse = Symbol("Original FullOperationResponse"); +/** + * A helper to convert response objects from the new pipeline back to the old one. + * @param response - A response object from core-client. + * @returns A response compatible with `HttpOperationResponse` from core-http. 
+ */ +function toCompatResponse(response, options) { + let request = (0, util_js_1.toWebResourceLike)(response.request); + let headers = (0, util_js_1.toHttpHeadersLike)(response.headers); + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } + else if (prop === "request") { + return request; + } + else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } + else if (prop === "request") { + request = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return Object.assign(Object.assign({}, response), { request, + headers }); + } +} +exports.toCompatResponse = toCompatResponse; +/** + * A helper to convert back to a PipelineResponse + * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. 
+ */ +function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } + else { + return Object.assign(Object.assign({}, compatResponse), { headers, request: (0, util_js_1.toPipelineRequest)(compatResponse.request) }); + } +} +exports.toPipelineResponse = toPipelineResponse; +//# sourceMappingURL=response.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-http-compat/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 000000000..6305f1798 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.43.1" + } + ] +} diff --git a/node_modules/@azure/core-http-compat/dist/commonjs/util.js b/node_modules/@azure/core-http-compat/dist/commonjs/util.js new file mode 100644 index 000000000..f00a486f4 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/commonjs/util.js @@ -0,0 +1,263 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.HttpHeaders = exports.toHttpHeadersLike = exports.toWebResourceLike = exports.toPipelineRequest = void 0; +const core_rest_pipeline_1 = require("@azure/core-rest-pipeline"); +// We use a custom symbol to cache a reference to the original request without +// exposing it on the public interface. 
+const originalRequestSymbol = Symbol("Original PipelineRequest"); +// Symbol.for() will return the same symbol if it's already been created +// This particular one is used in core-client to handle the case of when a request is +// cloned but we need to retrieve the OperationSpec and OperationArguments from the +// original request. +const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); +function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request = compatWebResource[originalRequestSymbol]; + const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); + if (request) { + request.headers = headers; + return request; + } + else { + const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = + options.originalRequest; + } + return newRequest; + } +} +exports.toPipelineRequest = toPipelineRequest; +function toWebResourceLike(request, options) { + var _a; + const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? 
_a : request; + const webResource = { + url: request.url, + method: request.method, + headers: toHttpHeadersLike(request.headers), + withCredentials: request.withCredentials, + timeout: request.timeout, + requestId: request.headers.get("x-ms-client-request-id") || request.requestId, + abortSignal: request.abortSignal, + body: request.body, + formData: request.formData, + keepAlive: !!request.disableKeepAlive, + onDownloadProgress: request.onDownloadProgress, + onUploadProgress: request.onUploadProgress, + proxySettings: request.proxySettings, + streamResponseStatusCodes: request.streamResponseStatusCodes, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + /** do nothing */ + }, + }; + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request; + } + else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest, + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return webResource; + } +} +exports.toWebResourceLike = toWebResourceLike; +/** + * Converts HttpHeaders from core-rest-pipeline to look like + * HttpHeaders 
from core-http. + * @param headers - HttpHeaders from core-rest-pipeline + * @returns HttpHeaders as they looked in core-http + */ +function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); +} +exports.toHttpHeadersLike = toHttpHeadersLike; +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +/** + * A collection of HTTP header key/value pairs. + */ +class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. 
+ */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. + */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +exports.HttpHeaders = HttpHeaders; +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/extendedClient.js b/node_modules/@azure/core-http-compat/dist/esm/extendedClient.js new file mode 100644 index 000000000..7995e4e9c --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/extendedClient.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createDisableKeepAlivePolicy, pipelineContainsDisableKeepAlivePolicy, } from "./policies/disableKeepAlivePolicy.js"; +import { redirectPolicyName } from "@azure/core-rest-pipeline"; +import { ServiceClient, } from "@azure/core-client"; +import { toCompatResponse } from "./response.js"; +/** + * Client to provide compatability between core V1 & V2. + */ +export class ExtendedServiceClient extends ServiceClient { + constructor(options) { + var _a, _b; + super(options); + if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && + !pipelineContainsDisableKeepAlivePolicy(this.pipeline)) { + this.pipeline.addPolicy(createDisableKeepAlivePolicy()); + } + if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { + this.pipeline.removePolicy({ + name: redirectPolicyName, + }); + } + } + /** + * Compatible send operation request function. + * + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns + */ + async sendOperationRequest(operationArguments, operationSpec) { + var _a; + const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? 
void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error); + } + } + operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: toCompatResponse(lastResponse), + }); + } + return result; + } +} +//# sourceMappingURL=extendedClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/httpClientAdapter.js b/node_modules/@azure/core-http-compat/dist/esm/httpClientAdapter.js new file mode 100644 index 000000000..65ea2a830 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/httpClientAdapter.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { toPipelineResponse } from "./response.js"; +import { toWebResourceLike } from "./util.js"; +/** + * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. 
+ * @param requestPolicyClient - A HttpClient compatible with core-http + * @returns A HttpClient compatible with core-rest-pipeline + */ +export function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request) => { + const response = await requestPolicyClient.sendRequest(toWebResourceLike(request, { createProxy: true })); + return toPipelineResponse(response); + }, + }; +} +//# sourceMappingURL=httpClientAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/index.js b/node_modules/@azure/core-http-compat/dist/esm/index.js new file mode 100644 index 000000000..697417600 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/index.js @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A Shim Library that provides compatibility between Core V1 & V2 Packages. + * + * @packageDocumentation + */ +export { ExtendedServiceClient, } from "./extendedClient.js"; +export { requestPolicyFactoryPolicyName, createRequestPolicyFactoryPolicy, HttpPipelineLogLevel, } from "./policies/requestPolicyFactoryPolicy.js"; +export { disableKeepAlivePolicyName } from "./policies/disableKeepAlivePolicy.js"; +export { convertHttpClient } from "./httpClientAdapter.js"; +export { toHttpHeadersLike, } from "./util.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/package.json b/node_modules/@azure/core-http-compat/dist/esm/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-http-compat/dist/esm/policies/disableKeepAlivePolicy.js b/node_modules/@azure/core-http-compat/dist/esm/policies/disableKeepAlivePolicy.js new file mode 100644 index 000000000..9c884add1 --- /dev/null +++ 
b/node_modules/@azure/core-http-compat/dist/esm/policies/disableKeepAlivePolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; +export function createDisableKeepAlivePolicy() { + return { + name: disableKeepAlivePolicyName, + async sendRequest(request, next) { + request.disableKeepAlive = true; + return next(request); + }, + }; +} +/** + * @internal + */ +export function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === disableKeepAlivePolicyName); +} +//# sourceMappingURL=disableKeepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/policies/keepAliveOptions.js b/node_modules/@azure/core-http-compat/dist/esm/policies/keepAliveOptions.js new file mode 100644 index 000000000..fa48fc505 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/policies/keepAliveOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=keepAliveOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/policies/redirectOptions.js b/node_modules/@azure/core-http-compat/dist/esm/policies/redirectOptions.js new file mode 100644 index 000000000..a532ff41c --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/policies/redirectOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=redirectOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/policies/requestPolicyFactoryPolicy.js b/node_modules/@azure/core-http-compat/dist/esm/policies/requestPolicyFactoryPolicy.js new file mode 100644 index 000000000..e016492d5 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/policies/requestPolicyFactoryPolicy.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { toPipelineRequest, toWebResourceLike } from "../util.js"; +import { toCompatResponse, toPipelineResponse } from "../response.js"; +/** + * An enum for compatibility with RequestPolicy + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, +}; +/** + * The name of the RequestPolicyFactoryPolicy + */ +export const requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; +/** + * A policy that wraps policies written for core-http. 
+ * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline + */ +export function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: requestPolicyFactoryPolicyName, + async sendRequest(request, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response = await next(toPipelineRequest(httpRequest)); + return toCompatResponse(response, { createProxy: true }); + }, + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = toWebResourceLike(request, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return toPipelineResponse(response); + }, + }; +} +//# sourceMappingURL=requestPolicyFactoryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/response.js b/node_modules/@azure/core-http-compat/dist/esm/response.js new file mode 100644 index 000000000..7fe7d46ea --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/response.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders } from "@azure/core-rest-pipeline"; +import { toHttpHeadersLike, toPipelineRequest, toWebResourceLike, } from "./util.js"; +const originalResponse = Symbol("Original FullOperationResponse"); +/** + * A helper to convert response objects from the new pipeline back to the old one. + * @param response - A response object from core-client. + * @returns A response compatible with `HttpOperationResponse` from core-http. + */ +export function toCompatResponse(response, options) { + let request = toWebResourceLike(response.request); + let headers = toHttpHeadersLike(response.headers); + if (options === null || options === void 0 ? 
void 0 : options.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } + else if (prop === "request") { + return request; + } + else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } + else if (prop === "request") { + request = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return Object.assign(Object.assign({}, response), { request, + headers }); + } +} +/** + * A helper to convert back to a PipelineResponse + * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. + */ +export function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = createHttpHeaders(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } + else { + return Object.assign(Object.assign({}, compatResponse), { headers, request: toPipelineRequest(compatResponse.request) }); + } +} +//# sourceMappingURL=response.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/esm/util.js b/node_modules/@azure/core-http-compat/dist/esm/util.js new file mode 100644 index 000000000..e47fa99e8 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/esm/util.js @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders, createPipelineRequest, } from "@azure/core-rest-pipeline"; +// We use a custom symbol to cache a reference to the original request without +// exposing it on the public interface. 
+const originalRequestSymbol = Symbol("Original PipelineRequest"); +// Symbol.for() will return the same symbol if it's already been created +// This particular one is used in core-client to handle the case of when a request is +// cloned but we need to retrieve the OperationSpec and OperationArguments from the +// original request. +const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); +export function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request = compatWebResource[originalRequestSymbol]; + const headers = createHttpHeaders(webResource.headers.toJson({ preserveCase: true })); + if (request) { + request.headers = headers; + return request; + } + else { + const newRequest = createPipelineRequest({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = + options.originalRequest; + } + return newRequest; + } +} +export function toWebResourceLike(request, options) { + var _a; + const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? 
_a : request; + const webResource = { + url: request.url, + method: request.method, + headers: toHttpHeadersLike(request.headers), + withCredentials: request.withCredentials, + timeout: request.timeout, + requestId: request.headers.get("x-ms-client-request-id") || request.requestId, + abortSignal: request.abortSignal, + body: request.body, + formData: request.formData, + keepAlive: !!request.disableKeepAlive, + onDownloadProgress: request.onDownloadProgress, + onUploadProgress: request.onUploadProgress, + proxySettings: request.proxySettings, + streamResponseStatusCodes: request.streamResponseStatusCodes, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + /** do nothing */ + }, + }; + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request; + } + else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest, + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return webResource; + } +} +/** + * Converts HttpHeaders from core-rest-pipeline to look like + * HttpHeaders from core-http. 
+ * @param headers - HttpHeaders from core-rest-pipeline + * @returns HttpHeaders as they looked in core-http + */ +export function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); +} +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +/** + * A collection of HTTP header key/value pairs. + */ +export class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. 
+ */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/extendedClient.js b/node_modules/@azure/core-http-compat/dist/react-native/extendedClient.js new file mode 100644 index 000000000..7995e4e9c --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/extendedClient.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createDisableKeepAlivePolicy, pipelineContainsDisableKeepAlivePolicy, } from "./policies/disableKeepAlivePolicy.js"; +import { redirectPolicyName } from "@azure/core-rest-pipeline"; +import { ServiceClient, } from "@azure/core-client"; +import { toCompatResponse } from "./response.js"; +/** + * Client to provide compatability between core V1 & V2. + */ +export class ExtendedServiceClient extends ServiceClient { + constructor(options) { + var _a, _b; + super(options); + if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && + !pipelineContainsDisableKeepAlivePolicy(this.pipeline)) { + this.pipeline.addPolicy(createDisableKeepAlivePolicy()); + } + if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { + this.pipeline.removePolicy({ + name: redirectPolicyName, + }); + } + } + /** + * Compatible send operation request function. + * + * @param operationArguments - Operation arguments + * @param operationSpec - Operation Spec + * @returns + */ + async sendOperationRequest(operationArguments, operationSpec) { + var _a; + const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? 
void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse; + let lastResponse; + function onResponse(rawResponse, flatResponse, error) { + lastResponse = rawResponse; + if (userProvidedCallBack) { + userProvidedCallBack(rawResponse, flatResponse, error); + } + } + operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); + const result = await super.sendOperationRequest(operationArguments, operationSpec); + if (lastResponse) { + Object.defineProperty(result, "_response", { + value: toCompatResponse(lastResponse), + }); + } + return result; + } +} +//# sourceMappingURL=extendedClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/httpClientAdapter.js b/node_modules/@azure/core-http-compat/dist/react-native/httpClientAdapter.js new file mode 100644 index 000000000..65ea2a830 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/httpClientAdapter.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { toPipelineResponse } from "./response.js"; +import { toWebResourceLike } from "./util.js"; +/** + * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. 
+ * @param requestPolicyClient - A HttpClient compatible with core-http + * @returns A HttpClient compatible with core-rest-pipeline + */ +export function convertHttpClient(requestPolicyClient) { + return { + sendRequest: async (request) => { + const response = await requestPolicyClient.sendRequest(toWebResourceLike(request, { createProxy: true })); + return toPipelineResponse(response); + }, + }; +} +//# sourceMappingURL=httpClientAdapter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/index.js b/node_modules/@azure/core-http-compat/dist/react-native/index.js new file mode 100644 index 000000000..697417600 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/index.js @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * A Shim Library that provides compatibility between Core V1 & V2 Packages. + * + * @packageDocumentation + */ +export { ExtendedServiceClient, } from "./extendedClient.js"; +export { requestPolicyFactoryPolicyName, createRequestPolicyFactoryPolicy, HttpPipelineLogLevel, } from "./policies/requestPolicyFactoryPolicy.js"; +export { disableKeepAlivePolicyName } from "./policies/disableKeepAlivePolicy.js"; +export { convertHttpClient } from "./httpClientAdapter.js"; +export { toHttpHeadersLike, } from "./util.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/package.json b/node_modules/@azure/core-http-compat/dist/react-native/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-http-compat/dist/react-native/policies/disableKeepAlivePolicy.js b/node_modules/@azure/core-http-compat/dist/react-native/policies/disableKeepAlivePolicy.js new file mode 100644 index 
000000000..9c884add1 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/policies/disableKeepAlivePolicy.js @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; +export function createDisableKeepAlivePolicy() { + return { + name: disableKeepAlivePolicyName, + async sendRequest(request, next) { + request.disableKeepAlive = true; + return next(request); + }, + }; +} +/** + * @internal + */ +export function pipelineContainsDisableKeepAlivePolicy(pipeline) { + return pipeline.getOrderedPolicies().some((policy) => policy.name === disableKeepAlivePolicyName); +} +//# sourceMappingURL=disableKeepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/policies/keepAliveOptions.js b/node_modules/@azure/core-http-compat/dist/react-native/policies/keepAliveOptions.js new file mode 100644 index 000000000..fa48fc505 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/policies/keepAliveOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=keepAliveOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/policies/redirectOptions.js b/node_modules/@azure/core-http-compat/dist/react-native/policies/redirectOptions.js new file mode 100644 index 000000000..a532ff41c --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/policies/redirectOptions.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=redirectOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/policies/requestPolicyFactoryPolicy.js b/node_modules/@azure/core-http-compat/dist/react-native/policies/requestPolicyFactoryPolicy.js new file mode 100644 index 000000000..e016492d5 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/policies/requestPolicyFactoryPolicy.js @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { toPipelineRequest, toWebResourceLike } from "../util.js"; +import { toCompatResponse, toPipelineResponse } from "../response.js"; +/** + * An enum for compatibility with RequestPolicy + */ +export var HttpPipelineLogLevel; +(function (HttpPipelineLogLevel) { + HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; + HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; + HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; + HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; +})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); +const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, +}; +/** + * The name of the RequestPolicyFactoryPolicy + */ +export const requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; +/** + * A policy that wraps policies written for core-http. 
+ * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline + */ +export function createRequestPolicyFactoryPolicy(factories) { + const orderedFactories = factories.slice().reverse(); + return { + name: requestPolicyFactoryPolicyName, + async sendRequest(request, next) { + let httpPipeline = { + async sendRequest(httpRequest) { + const response = await next(toPipelineRequest(httpRequest)); + return toCompatResponse(response, { createProxy: true }); + }, + }; + for (const factory of orderedFactories) { + httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); + } + const webResourceLike = toWebResourceLike(request, { createProxy: true }); + const response = await httpPipeline.sendRequest(webResourceLike); + return toPipelineResponse(response); + }, + }; +} +//# sourceMappingURL=requestPolicyFactoryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/response.js b/node_modules/@azure/core-http-compat/dist/react-native/response.js new file mode 100644 index 000000000..7fe7d46ea --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/response.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders } from "@azure/core-rest-pipeline"; +import { toHttpHeadersLike, toPipelineRequest, toWebResourceLike, } from "./util.js"; +const originalResponse = Symbol("Original FullOperationResponse"); +/** + * A helper to convert response objects from the new pipeline back to the old one. + * @param response - A response object from core-client. + * @returns A response compatible with `HttpOperationResponse` from core-http. + */ +export function toCompatResponse(response, options) { + let request = toWebResourceLike(response.request); + let headers = toHttpHeadersLike(response.headers); + if (options === null || options === void 0 ? 
void 0 : options.createProxy) { + return new Proxy(response, { + get(target, prop, receiver) { + if (prop === "headers") { + return headers; + } + else if (prop === "request") { + return request; + } + else if (prop === originalResponse) { + return response; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "headers") { + headers = value; + } + else if (prop === "request") { + request = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return Object.assign(Object.assign({}, response), { request, + headers }); + } +} +/** + * A helper to convert back to a PipelineResponse + * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. + */ +export function toPipelineResponse(compatResponse) { + const extendedCompatResponse = compatResponse; + const response = extendedCompatResponse[originalResponse]; + const headers = createHttpHeaders(compatResponse.headers.toJson({ preserveCase: true })); + if (response) { + response.headers = headers; + return response; + } + else { + return Object.assign(Object.assign({}, compatResponse), { headers, request: toPipelineRequest(compatResponse.request) }); + } +} +//# sourceMappingURL=response.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/dist/react-native/util.js b/node_modules/@azure/core-http-compat/dist/react-native/util.js new file mode 100644 index 000000000..e47fa99e8 --- /dev/null +++ b/node_modules/@azure/core-http-compat/dist/react-native/util.js @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders, createPipelineRequest, } from "@azure/core-rest-pipeline"; +// We use a custom symbol to cache a reference to the original request without +// exposing it on the public interface. 
+const originalRequestSymbol = Symbol("Original PipelineRequest"); +// Symbol.for() will return the same symbol if it's already been created +// This particular one is used in core-client to handle the case of when a request is +// cloned but we need to retrieve the OperationSpec and OperationArguments from the +// original request. +const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); +export function toPipelineRequest(webResource, options = {}) { + const compatWebResource = webResource; + const request = compatWebResource[originalRequestSymbol]; + const headers = createHttpHeaders(webResource.headers.toJson({ preserveCase: true })); + if (request) { + request.headers = headers; + return request; + } + else { + const newRequest = createPipelineRequest({ + url: webResource.url, + method: webResource.method, + headers, + withCredentials: webResource.withCredentials, + timeout: webResource.timeout, + requestId: webResource.requestId, + abortSignal: webResource.abortSignal, + body: webResource.body, + formData: webResource.formData, + disableKeepAlive: !!webResource.keepAlive, + onDownloadProgress: webResource.onDownloadProgress, + onUploadProgress: webResource.onUploadProgress, + proxySettings: webResource.proxySettings, + streamResponseStatusCodes: webResource.streamResponseStatusCodes, + }); + if (options.originalRequest) { + newRequest[originalClientRequestSymbol] = + options.originalRequest; + } + return newRequest; + } +} +export function toWebResourceLike(request, options) { + var _a; + const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? 
_a : request; + const webResource = { + url: request.url, + method: request.method, + headers: toHttpHeadersLike(request.headers), + withCredentials: request.withCredentials, + timeout: request.timeout, + requestId: request.headers.get("x-ms-client-request-id") || request.requestId, + abortSignal: request.abortSignal, + body: request.body, + formData: request.formData, + keepAlive: !!request.disableKeepAlive, + onDownloadProgress: request.onDownloadProgress, + onUploadProgress: request.onUploadProgress, + proxySettings: request.proxySettings, + streamResponseStatusCodes: request.streamResponseStatusCodes, + clone() { + throw new Error("Cannot clone a non-proxied WebResourceLike"); + }, + prepare() { + throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); + }, + validateRequestProperties() { + /** do nothing */ + }, + }; + if (options === null || options === void 0 ? void 0 : options.createProxy) { + return new Proxy(webResource, { + get(target, prop, receiver) { + if (prop === originalRequestSymbol) { + return request; + } + else if (prop === "clone") { + return () => { + return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { + createProxy: true, + originalRequest, + }); + }; + } + return Reflect.get(target, prop, receiver); + }, + set(target, prop, value, receiver) { + if (prop === "keepAlive") { + request.disableKeepAlive = !value; + } + const passThroughProps = [ + "url", + "method", + "withCredentials", + "timeout", + "requestId", + "abortSignal", + "body", + "formData", + "onDownloadProgress", + "onUploadProgress", + "proxySettings", + "streamResponseStatusCodes", + ]; + if (typeof prop === "string" && passThroughProps.includes(prop)) { + request[prop] = value; + } + return Reflect.set(target, prop, value, receiver); + }, + }); + } + else { + return webResource; + } +} +/** + * Converts HttpHeaders from core-rest-pipeline to look like + * HttpHeaders from core-http. 
+ * @param headers - HttpHeaders from core-rest-pipeline + * @returns HttpHeaders as they looked in core-http + */ +export function toHttpHeadersLike(headers) { + return new HttpHeaders(headers.toJSON({ preserveCase: true })); +} +/** + * A collection of HttpHeaders that can be sent with a HTTP request. + */ +function getHeaderKey(headerName) { + return headerName.toLowerCase(); +} +/** + * A collection of HTTP header key/value pairs. + */ +export class HttpHeaders { + constructor(rawHeaders) { + this._headersMap = {}; + if (rawHeaders) { + for (const headerName in rawHeaders) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param headerName - The name of the header to set. This value is case-insensitive. + * @param headerValue - The value of the header to set. + */ + set(headerName, headerValue) { + this._headersMap[getHeaderKey(headerName)] = { + name: headerName, + value: headerValue.toString(), + }; + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param headerName - The name of the header. + */ + get(headerName) { + const header = this._headersMap[getHeaderKey(headerName)]; + return !header ? undefined : header.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + */ + contains(headerName) { + return !!this._headersMap[getHeaderKey(headerName)]; + } + /** + * Remove the header with the provided headerName. Return whether or not the header existed and + * was removed. + * @param headerName - The name of the header to remove. + */ + remove(headerName) { + const result = this.contains(headerName); + delete this._headersMap[getHeaderKey(headerName)]; + return result; + } + /** + * Get the headers that are contained this collection as an object. 
+ */ + rawHeaders() { + return this.toJson({ preserveCase: true }); + } + /** + * Get the headers that are contained in this collection as an array. + */ + headersArray() { + const headers = []; + for (const headerKey in this._headersMap) { + headers.push(this._headersMap[headerKey]); + } + return headers; + } + /** + * Get the header names that are contained in this collection. + */ + headerNames() { + const headerNames = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerNames.push(headers[i].name); + } + return headerNames; + } + /** + * Get the header values that are contained in this collection. + */ + headerValues() { + const headerValues = []; + const headers = this.headersArray(); + for (let i = 0; i < headers.length; ++i) { + headerValues.push(headers[i].value); + } + return headerValues; + } + /** + * Get the JSON object representation of this HTTP header collection. + */ + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJson({ preserveCase: true })); + } + /** + * Create a deep clone/copy of this HttpHeaders collection. 
+ */ + clone() { + const resultPreservingCasing = {}; + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + resultPreservingCasing[header.name] = header.value; + } + return new HttpHeaders(resultPreservingCasing); + } +} +//# sourceMappingURL=util.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http-compat/package.json b/node_modules/@azure/core-http-compat/package.json new file mode 100644 index 000000000..12ce73573 --- /dev/null +++ b/node_modules/@azure/core-http-compat/package.json @@ -0,0 +1,113 @@ +{ + "name": "@azure/core-http-compat", + "version": "2.1.2", + "description": "Core HTTP Compatibility Library to bridge the gap between Core V1 & V2 packages.", + "sdk-type": "client", + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=18.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-compat/", + "sideEffects": false, + "prettier": "@azure/eslint-plugin-azure-sdk/prettier.json", + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + 
"build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,mts}\" \"test/**/*.{ts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,mts}\" \"test/**/*.{ts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "echo skipped", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "echo skipped", + "test:node": "echo skipped", + "test": "npm run clean && npm run build && npm run unit-test:node && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-rest-pipeline": "^1.3.0", + "@azure/core-client": "^1.3.0" + }, + "devDependencies": { + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure/test-utils": "^1.0.0", + "@azure/dev-tool": "^1.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "@types/node": "^18.0.0", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.13.0", + "typescript": "~5.3.3", + 
"vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js b/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js deleted file mode 100644 index 5deff577f..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/createSpanLegacy.js +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// NOTE: we've moved this code into core-tracing but these functions -// were a part of the GA'd library and can't be removed until the next major -// release. They currently get called always, even if tracing is not enabled. -import { createSpanFunction as coreTracingCreateSpanFunction } from "@azure/core-tracing"; -/** - * This function is only here for compatibility. Use createSpanFunction in core-tracing. - * - * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. - * @hidden - - * @param spanConfig - The name of the operation being performed. - * @param tracingOptions - The options for the underlying http request. - */ -export function createSpanFunction(args) { - return coreTracingCreateSpanFunction(args); -} -//# sourceMappingURL=createSpanLegacy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js deleted file mode 100644 index 1ac13dd8e..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenCache.js +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Defines the default token refresh buffer duration. 
- */ -export const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes -/** - * Provides an {@link AccessTokenCache} implementation which clears - * the cached {@link AccessToken}'s after the expiresOnTimestamp has - * passed. - * - * @deprecated No longer used in the bearer authorization policy. - */ -export class ExpiringAccessTokenCache { - /** - * Constructs an instance of {@link ExpiringAccessTokenCache} with - * an optional expiration buffer time. - */ - constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { - this.cachedToken = undefined; - this.tokenRefreshBufferMs = tokenRefreshBufferMs; - } - /** - * Saves an access token into the internal in-memory cache. - * @param accessToken - Access token or undefined to clear the cache. - */ - setCachedToken(accessToken) { - this.cachedToken = accessToken; - } - /** - * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. - */ - getCachedToken() { - if (this.cachedToken && - Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { - this.cachedToken = undefined; - } - return this.cachedToken; - } -} -//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js b/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js deleted file mode 100644 index a866fb9d1..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/accessTokenRefresher.js +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. - * - * @deprecated No longer used in the bearer authorization policy. 
- */ -export class AccessTokenRefresher { - constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { - this.credential = credential; - this.scopes = scopes; - this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; - this.lastCalled = 0; - } - /** - * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying - * that we are ready for a new refresh. - */ - isReady() { - // We're only ready for a new refresh if the required milliseconds have passed. - return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); - } - /** - * Stores the time in which it is called, - * then requests a new token, - * then sets this.promise to undefined, - * then returns the token. - */ - async getToken(options) { - this.lastCalled = Date.now(); - const token = await this.credential.getToken(this.scopes, options); - this.promise = undefined; - return token || undefined; - } - /** - * Requests a new token if we're not currently waiting for a new token. - * Returns null if the required time between each call hasn't been reached. - */ - refresh(options) { - if (!this.promise) { - this.promise = this.getToken(options); - } - return this.promise; - } -} -//# sourceMappingURL=accessTokenRefresher.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js deleted file mode 100644 index d3f2a4fae..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/apiKeyCredentials.js +++ /dev/null @@ -1,53 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { HttpHeaders } from "../httpHeaders"; -/** - * Authenticates to a service using an API key. - */ -export class ApiKeyCredentials { - /** - * @param options - Specifies the options to be provided for auth. 
Either header or query needs to be provided. - */ - constructor(options) { - if (!options || (options && !options.inHeader && !options.inQuery)) { - throw new Error(`options cannot be null or undefined. Either "inHeader" or "inQuery" property of the options object needs to be provided.`); - } - this.inHeader = options.inHeader; - this.inQuery = options.inQuery; - } - /** - * Signs a request with the values provided in the inHeader and inQuery parameter. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. - */ - signRequest(webResource) { - if (!webResource) { - return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); - } - if (this.inHeader) { - if (!webResource.headers) { - webResource.headers = new HttpHeaders(); - } - for (const headerName in this.inHeader) { - webResource.headers.set(headerName, this.inHeader[headerName]); - } - } - if (this.inQuery) { - if (!webResource.url) { - return Promise.reject(new Error(`url cannot be null in the request object.`)); - } - if (webResource.url.indexOf("?") < 0) { - webResource.url += "?"; - } - for (const key in this.inQuery) { - if (!webResource.url.endsWith("?")) { - webResource.url += "&"; - } - webResource.url += `${key}=${this.inQuery[key]}`; - } - } - return Promise.resolve(webResource); - } -} -//# sourceMappingURL=apiKeyCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js deleted file mode 100644 index fc49c7255..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/basicAuthenticationCredentials.js +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import * as base64 from "../util/base64"; -import { Constants } from "../util/constants"; -import { HttpHeaders } from "../httpHeaders"; -const HeaderConstants = Constants.HeaderConstants; -const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; -/** - * A simple {@link ServiceClientCredential} that authenticates with a username and a password. - */ -export class BasicAuthenticationCredentials { - /** - * Creates a new BasicAuthenticationCredentials object. - * - * @param userName - User name. - * @param password - Password. - * @param authorizationScheme - The authorization scheme. - */ - constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { - /** - * Authorization scheme. Defaults to "Basic". - * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes - */ - this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; - if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { - throw new Error("userName cannot be null or undefined and must be of type string."); - } - if (password === null || password === undefined || typeof password.valueOf() !== "string") { - throw new Error("password cannot be null or undefined and must be of type string."); - } - this.userName = userName; - this.password = password; - this.authorizationScheme = authorizationScheme; - } - /** - * Signs a request with the Authentication header. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. 
- */ - signRequest(webResource) { - const credentials = `${this.userName}:${this.password}`; - const encodedCredentials = `${this.authorizationScheme} ${base64.encodeString(credentials)}`; - if (!webResource.headers) - webResource.headers = new HttpHeaders(); - webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); - return Promise.resolve(webResource); - } -} -//# sourceMappingURL=basicAuthenticationCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js deleted file mode 100644 index 3cfbd7a03..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/credentials.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export {}; -//# sourceMappingURL=credentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js deleted file mode 100644 index f44565f43..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/serviceClientCredentials.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export {}; -//# sourceMappingURL=serviceClientCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js b/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js deleted file mode 100644 index ac50243fa..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/credentials/topicCredentials.js +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { ApiKeyCredentials } from "./apiKeyCredentials"; -/** - * A {@link TopicCredentials} object used for Azure Event Grid. 
- */ -export class TopicCredentials extends ApiKeyCredentials { - /** - * Creates a new EventGrid TopicCredentials object. - * - * @param topicKey - The EventGrid topic key - */ - constructor(topicKey) { - if (!topicKey || (topicKey && typeof topicKey !== "string")) { - throw new Error("topicKey cannot be null or undefined and must be of type string."); - } - const options = { - inHeader: { - "aeg-sas-key": topicKey, - }, - }; - super(options); - } -} -//# sourceMappingURL=topicCredentials.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js deleted file mode 100644 index 1f5e7fafb..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.browser.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export { XhrHttpClient as DefaultHttpClient } from "./xhrHttpClient"; -//# sourceMappingURL=defaultHttpClient.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js deleted file mode 100644 index 79177d54d..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/defaultHttpClient.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export { NodeFetchHttpClient as DefaultHttpClient } from "./nodeFetchHttpClient"; -//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClient.js b/node_modules/@azure/core-http/dist-esm/src/httpClient.js deleted file mode 100644 index 8908881c3..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/httpClient.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-export {}; -//# sourceMappingURL=httpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js b/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js deleted file mode 100644 index 48ec59adc..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/httpClientCache.js +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { DefaultHttpClient } from "./defaultHttpClient"; -let cachedHttpClient; -export function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new DefaultHttpClient(); - } - return cachedHttpClient; -} -//# sourceMappingURL=httpClientCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js b/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js deleted file mode 100644 index 7f7716e2b..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/httpHeaders.js +++ /dev/null @@ -1,151 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A collection of HttpHeaders that can be sent with a HTTP request. - */ -function getHeaderKey(headerName) { - return headerName.toLowerCase(); -} -export function isHttpHeadersLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.rawHeaders === "function" && - typeof castObject.clone === "function" && - typeof castObject.get === "function" && - typeof castObject.set === "function" && - typeof castObject.contains === "function" && - typeof castObject.remove === "function" && - typeof castObject.headersArray === "function" && - typeof castObject.headerValues === "function" && - typeof castObject.headerNames === "function" && - typeof castObject.toJson === "function") { - return true; - } - } - return false; -} -/** - * A collection of HTTP header key/value pairs. 
- */ -export class HttpHeaders { - constructor(rawHeaders) { - this._headersMap = {}; - if (rawHeaders) { - for (const headerName in rawHeaders) { - this.set(headerName, rawHeaders[headerName]); - } - } - } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param headerName - The name of the header to set. This value is case-insensitive. - * @param headerValue - The value of the header to set. - */ - set(headerName, headerValue) { - this._headersMap[getHeaderKey(headerName)] = { - name: headerName, - value: headerValue.toString().trim(), - }; - } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param headerName - The name of the header. - */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; - return !header ? undefined : header.value; - } - /** - * Get whether or not this header collection contains a header entry for the provided header name. - */ - contains(headerName) { - return !!this._headersMap[getHeaderKey(headerName)]; - } - /** - * Remove the header with the provided headerName. Return whether or not the header existed and - * was removed. - * @param headerName - The name of the header to remove. - */ - remove(headerName) { - const result = this.contains(headerName); - delete this._headersMap[getHeaderKey(headerName)]; - return result; - } - /** - * Get the headers that are contained this collection as an object. - */ - rawHeaders() { - return this.toJson({ preserveCase: true }); - } - /** - * Get the headers that are contained in this collection as an array. - */ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { - headers.push(this._headersMap[headerKey]); - } - return headers; - } - /** - * Get the header names that are contained in this collection. 
- */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerNames.push(headers[i].name); - } - return headerNames; - } - /** - * Get the header values that are contained in this collection. - */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerValues.push(headers[i].value); - } - return headerValues; - } - /** - * Get the JSON object representation of this HTTP header collection. - */ - toJson(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name] = header.value; - } - } - else { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[getHeaderKey(header.name)] = header.value; - } - } - return result; - } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJson({ preserveCase: true })); - } - /** - * Create a deep clone/copy of this HttpHeaders collection. - */ - clone() { - const resultPreservingCasing = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - resultPreservingCasing[header.name] = header.value; - } - return new HttpHeaders(resultPreservingCasing); - } -} -//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js b/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js deleted file mode 100644 index 540684d9b..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/httpOperationResponse.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-export {}; -//# sourceMappingURL=httpOperationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js deleted file mode 100644 index 4e70194ea..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogLevel.js +++ /dev/null @@ -1,25 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The different levels of logs that can be used with the HttpPipelineLogger. - */ -export var HttpPipelineLogLevel; -(function (HttpPipelineLogLevel) { - /** - * A log level that indicates that no logs will be logged. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; - /** - * An error log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; - /** - * A warning log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; - /** - * An information log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; -})(HttpPipelineLogLevel || (HttpPipelineLogLevel = {})); -//# sourceMappingURL=httpPipelineLogLevel.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js b/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js deleted file mode 100644 index 6f61f81d3..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/httpPipelineLogger.js +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; -/** - * A HttpPipelineLogger that will send its logs to the console. - */ -export class ConsoleHttpPipelineLogger { - /** - * Create a new ConsoleHttpPipelineLogger. - * @param minimumLogLevel - The log level threshold for what logs will be logged. 
- */ - constructor(minimumLogLevel) { - this.minimumLogLevel = minimumLogLevel; - } - /** - * Log the provided message. - * @param logLevel - The HttpLogDetailLevel associated with this message. - * @param message - The message to log. - */ - log(logLevel, message) { - const logMessage = `${HttpPipelineLogLevel[logLevel]}: ${message}`; - switch (logLevel) { - case HttpPipelineLogLevel.ERROR: - console.error(logMessage); - break; - case HttpPipelineLogLevel.WARNING: - console.warn(logMessage); - break; - case HttpPipelineLogLevel.INFO: - console.log(logMessage); - break; - } - } -} -//# sourceMappingURL=httpPipelineLogger.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/index.js b/node_modules/@azure/core-http/dist-esm/src/index.js deleted file mode 100644 index f0a3a0528..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/index.js +++ /dev/null @@ -1,45 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/* eslint-disable-next-line @typescript-eslint/triple-slash-reference */ -/// -export { WebResource, } from "./webResource"; -export { DefaultHttpClient } from "./defaultHttpClient"; -export { HttpHeaders } from "./httpHeaders"; -export { HttpPipelineLogLevel } from "./httpPipelineLogLevel"; -export { RestError } from "./restError"; -export { operationOptionsToRequestOptionsBase, } from "./operationOptions"; -export { ServiceClient, flattenResponse, createPipelineFromOptions, } from "./serviceClient"; -export { QueryCollectionFormat } from "./queryCollectionFormat"; -export { Constants } from "./util/constants"; -export { bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; -export { logPolicy } from "./policies/logPolicy"; -export { BaseRequestPolicy, RequestPolicyOptions, } from "./policies/requestPolicy"; -export { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; -export { exponentialRetryPolicy, RetryMode } from 
"./policies/exponentialRetryPolicy"; -export { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; -export { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; -export { getDefaultProxySettings, proxyPolicy } from "./policies/proxyPolicy"; -export { redirectPolicy } from "./policies/redirectPolicy"; -export { keepAlivePolicy } from "./policies/keepAlivePolicy"; -export { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; -export { signingPolicy } from "./policies/signingPolicy"; -export { userAgentPolicy, getDefaultUserAgentValue, } from "./policies/userAgentPolicy"; -export { deserializationPolicy, deserializeResponseBody, } from "./policies/deserializationPolicy"; -export { tracingPolicy } from "./policies/tracingPolicy"; -export { MapperType, Serializer, serializeObject, } from "./serializer"; -export { stripRequest, stripResponse, executePromisesSequentially, generateUuid, encodeUri, promiseToCallback, promiseToServiceCallback, isValidUuid, applyMixins, isDuration, } from "./util/utils"; -export { isNode } from "@azure/core-util"; -export { URLBuilder, URLQuery } from "./url"; -export { delay } from "@azure/core-util"; -// legacy exports. Use core-tracing instead (and remove on next major version update of core-http). 
-export { createSpanFunction } from "./createSpanLegacy"; -// Credentials -export { isTokenCredential } from "@azure/core-auth"; -export { ExpiringAccessTokenCache } from "./credentials/accessTokenCache"; -export { AccessTokenRefresher } from "./credentials/accessTokenRefresher"; -export { BasicAuthenticationCredentials } from "./credentials/basicAuthenticationCredentials"; -export { ApiKeyCredentials } from "./credentials/apiKeyCredentials"; -export { TopicCredentials } from "./credentials/topicCredentials"; -export { parseXML, stringifyXML } from "./util/xml"; -export { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/log.js b/node_modules/@azure/core-http/dist-esm/src/log.js deleted file mode 100644 index 10dd4222b..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/log.js +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { createClientLogger } from "@azure/logger"; -export const logger = createClientLogger("core-http"); -//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js deleted file mode 100644 index 8f1d6ceb0..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/nodeFetchHttpClient.js +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import * as http from "http"; -import * as https from "https"; -import { AbortController, AbortError } from "@azure/abort-controller"; -import { HttpHeaders } from "./httpHeaders"; -import { createProxyAgent, isUrlHttps } from "./proxyAgent"; -import { Transform } from "stream"; -import FormData from "form-data"; -import { RestError } from "./restError"; -import { logger } from "./log"; -import node_fetch from "node-fetch"; -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -export class ReportTransform extends Transform { - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; - this.loadedBytes = 0; - } - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes += chunk.length; - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(undefined); - } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -/** - * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} - */ -export function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; -} -/** - * An HTTP client that uses `node-fetch`. - */ -export class NodeFetchHttpClient { - constructor() { - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - } - /** - * Provides minimum viable error handling and the logic that executes the abstract methods. - * @param httpRequest - Object representing the outgoing HTTP request. 
- * @returns An object representing the incoming HTTP response. - */ - async sendRequest(httpRequest) { - var _a; - if (!httpRequest && typeof httpRequest !== "object") { - throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); - } - const abortController = new AbortController(); - let abortListener; - if (httpRequest.abortSignal) { - if (httpRequest.abortSignal.aborted) { - throw new AbortError("The operation was aborted."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController.abort(); - } - }; - httpRequest.abortSignal.addEventListener("abort", abortListener); - } - if (httpRequest.timeout) { - setTimeout(() => { - abortController.abort(); - }, httpRequest.timeout); - } - if (httpRequest.formData) { - const formData = httpRequest.formData; - const requestForm = new FormData(); - const appendFormValue = (key, value) => { - // value function probably returns a stream so we can provide a fresh stream on each retry - if (typeof value === "function") { - value = value(); - } - if (value && - Object.prototype.hasOwnProperty.call(value, "value") && - Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } - } - httpRequest.body = requestForm; - httpRequest.formData = undefined; - const contentType = httpRequest.headers.get("Content-Type"); - if (contentType && contentType.indexOf("multipart/form-data") !== -1) { - if (typeof requestForm.getBoundary === "function") { - httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); - } - else { - // browser will 
automatically apply a suitable content-type header - httpRequest.headers.remove("Content-Type"); - } - } - } - let body = httpRequest.body - ? typeof httpRequest.body === "function" - ? httpRequest.body() - : httpRequest.body - : undefined; - if (httpRequest.onUploadProgress && httpRequest.body) { - const onUploadProgress = httpRequest.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const platformSpecificRequestInit = await this.prepareRequest(httpRequest); - const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, - // the types for RequestInit are from the browser, which expects AbortSignal to - // have `reason` and `throwIfAborted`, but these don't exist on our polyfill - // for Node. - signal: abortController.signal, redirect: "manual" }, platformSpecificRequestInit); - let operationResponse; - try { - const response = await this.fetch(httpRequest.url, requestInit); - const headers = parseHeaders(response.headers); - const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || - httpRequest.streamResponseBody; - operationResponse = { - headers: headers, - request: httpRequest, - status: response.status, - readableStreamBody: streaming - ? response.body - : undefined, - bodyAsText: !streaming ? 
await response.text() : undefined, - }; - const onDownloadProgress = httpRequest.onDownloadProgress; - if (onDownloadProgress) { - const responseBody = response.body || undefined; - if (isReadableStream(responseBody)) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - responseBody.pipe(downloadReportStream); - operationResponse.readableStreamBody = downloadReportStream; - } - else { - const length = parseInt(headers.get("Content-Length")) || undefined; - if (length) { - // Calling callback for non-stream response for consistency with browser - onDownloadProgress({ loadedBytes: length }); - } - } - } - await this.processRequest(operationResponse); - return operationResponse; - } - catch (error) { - const fetchError = error; - if (fetchError.code === "ENOTFOUND") { - throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); - } - else if (fetchError.type === "aborted") { - throw new AbortError("The operation was aborted."); - } - throw fetchError; - } - finally { - // clean up event listener - if (httpRequest.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); - } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) { - downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController); - } - Promise.all([uploadStreamDone, downloadStreamDone]) - .then(() => { - var _a; - (_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); - return; - }) - .catch((e) => { - logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); - } - } - } - getOrCreateAgent(httpRequest) { - var _a; - const isHttps = isUrlHttps(httpRequest.url); - // At the moment, proxy settings and keepAlive are mutually - // exclusive because the 'tunnel' library currently lacks the - // ability to create a proxy with keepAlive turned on. - if (httpRequest.proxySettings) { - const { host, port, username, password } = httpRequest.proxySettings; - const key = `${host}:${port}:${username}:${password}`; - const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; - let agent = getCachedAgent(isHttps, proxyAgents); - if (agent) { - return agent; - } - const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); - agent = tunnel.agent; - if (tunnel.isHttps) { - proxyAgents.httpsAgent = tunnel.agent; - } - else { - proxyAgents.httpAgent = tunnel.agent; - } - this.proxyAgentMap.set(key, proxyAgents); - return agent; - } - else if (httpRequest.keepAlive) { - let agent = getCachedAgent(isHttps, this.keepAliveAgents); - if (agent) { - return agent; - } - const agentOptions = { - keepAlive: httpRequest.keepAlive, - }; - if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); - } - else { - agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); - } - return agent; - } - else { - return isHttps ? https.globalAgent : http.globalAgent; - } - } - /** - * Uses `node-fetch` to perform the request. - */ - // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs - async fetch(input, init) { - return node_fetch(input, init); - } - /** - * Prepares a request based on the provided web resource. 
- */ - async prepareRequest(httpRequest) { - const requestInit = {}; - // Set the http(s) agent - requestInit.agent = this.getOrCreateAgent(httpRequest); - requestInit.compress = httpRequest.decompressResponse; - return requestInit; - } - /** - * Process an HTTP response. - */ - async processRequest(_operationResponse) { - /* no_op */ - } -} -//# sourceMappingURL=nodeFetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationArguments.js b/node_modules/@azure/core-http/dist-esm/src/operationArguments.js deleted file mode 100644 index 21722f3c8..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/operationArguments.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export {}; -//# sourceMappingURL=operationArguments.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationOptions.js b/node_modules/@azure/core-http/dist-esm/src/operationOptions.js deleted file mode 100644 index 041c56bcb..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/operationOptions.js +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { __rest } from "tslib"; -/** - * Converts an OperationOptions to a RequestOptionsBase - * - * @param opts - OperationOptions object to convert to RequestOptionsBase - */ -export function operationOptionsToRequestOptionsBase(opts) { - const { requestOptions, tracingOptions } = opts, additionalOptions = __rest(opts, ["requestOptions", "tracingOptions"]); - let result = additionalOptions; - if (requestOptions) { - result = Object.assign(Object.assign({}, result), requestOptions); - } - if (tracingOptions) { - result.tracingContext = tracingOptions.tracingContext; - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
- result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? void 0 : tracingOptions.spanOptions; - } - return result; -} -//# sourceMappingURL=operationOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationParameter.js b/node_modules/@azure/core-http/dist-esm/src/operationParameter.js deleted file mode 100644 index 1361e298a..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/operationParameter.js +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -export function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -export function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} -//# sourceMappingURL=operationParameter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationResponse.js b/node_modules/@azure/core-http/dist-esm/src/operationResponse.js deleted file mode 100644 index 6781aacae..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/operationResponse.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-export {}; -//# sourceMappingURL=operationResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/operationSpec.js b/node_modules/@azure/core-http/dist-esm/src/operationSpec.js deleted file mode 100644 index 2ba9e6c89..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/operationSpec.js +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { MapperType } from "./serializer"; -/** - * Gets the list of status codes for streaming responses. - * @internal - */ -export function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} -//# sourceMappingURL=operationSpec.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js b/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js deleted file mode 100644 index 2abe14a88..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/pipelineOptions.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export {}; -//# sourceMappingURL=pipelineOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js deleted file mode 100644 index 37d2d9e5b..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/bearerTokenAuthenticationPolicy.js +++ /dev/null @@ -1,183 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import { BaseRequestPolicy, } from "../policies/requestPolicy"; -import { Constants } from "../util/constants"; -import { delay } from "@azure/core-util"; -// Default options for the cycler if none are provided -export const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry -}; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. 
It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - }, - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. 
- refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. 
- */ -export function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return { - create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - }, - }; -} -//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js deleted file mode 100644 index 5f9bd8bbd..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/deserializationPolicy.js +++ /dev/null @@ -1,239 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { XML_CHARKEY } from "../util/serializer.common"; -import { MapperType } from "../serializer"; -import { RestError } from "../restError"; -import { parseXML } from "../util/xml"; -/** - * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they - * pass through the HTTP pipeline. 
- */ -export function deserializationPolicy(deserializationContentTypes, parsingOptions) { - return { - create: (nextPolicy, options) => { - return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - }, - }; -} -export const defaultJsonContentTypes = ["application/json", "text/json"]; -export const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; -export const DefaultDeserializationOptions = { - expectedContentTypes: { - json: defaultJsonContentTypes, - xml: defaultXmlContentTypes, - }, -}; -/** - * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the - * HTTP pipeline. - */ -export class DeserializationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { - var _a; - super(nextPolicy, requestPolicyOptions); - this.jsonContentTypes = - (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; - this.xmlContentTypes = - (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; - this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - } - async sendRequest(request) { - return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey, - })); - } -} -function getOperationResponse(parsedResponse) { - let result; - const request = parsedResponse.request; - const operationSpec = request.operationSpec; - if (operationSpec) { - const operationResponseGetter = request.operationResponseGetter; - if (!operationResponseGetter) { - result = operationSpec.responses[parsedResponse.status]; - } - else { - result = operationResponseGetter(operationSpec, parsedResponse); - } - } - return result; -} -function shouldDeserializeResponse(parsedResponse) { - const shouldDeserialize = parsedResponse.request.shouldDeserialize; - let result; - if (shouldDeserialize === undefined) { - result = true; - } - else if (typeof shouldDeserialize === "boolean") { - result = shouldDeserialize; - } - else { - result = shouldDeserialize(parsedResponse); - } - return result; -} -/** - * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. - * @param jsonContentTypes - Response content types to parse the body as JSON. - * @param xmlContentTypes - Response content types to parse the body as XML. - * @param response - HTTP Response from the pipeline. - * @param options - Options to the serializer, mostly for configuring the XML parser if needed. - * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. - */ -export function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? 
_b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { - if (!shouldDeserializeResponse(parsedResponse)) { - return parsedResponse; - } - const operationSpec = parsedResponse.request.operationSpec; - if (!operationSpec || !operationSpec.responses) { - return parsedResponse; - } - const responseSpec = getOperationResponse(parsedResponse); - const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); - if (error) { - throw error; - } - else if (shouldReturnResponse) { - return parsedResponse; - } - // An operation response spec does exist for current status code, so - // use it to deserialize the response. - if (responseSpec) { - if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; - if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof valueToDeserialize === "object" - ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] - : []; - } - try { - parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); - } - catch (innerError) { - const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - throw restError; - } - } - else if (operationSpec.httpMethod === "HEAD") { - // head methods never have a body, but we return a boolean to indicate presence/absence of the resource - parsedResponse.parsedBody = response.status >= 200 && response.status < 300; - } - if (responseSpec.headersMapper) { - parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders", options); - } - } - return parsedResponse; - }); -} -function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); - return (expectedStatusCodes.length === 0 || - (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); -} -function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { - var _a; - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) - ? isSuccessByStatus - : !!responseSpec; - if (isExpectedStatusCode) { - if (responseSpec) { - if (!responseSpec.isError) { - return { error: null, shouldReturnResponse: false }; - } - } - else { - return { error: null, shouldReturnResponse: false }; - } - } - const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; - const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(parsedResponse.status)) || - parsedResponse.request.streamResponseBody; - const initialErrorMessage = streaming - ? `Unexpected status code: ${parsedResponse.status}` - : parsedResponse.bodyAsText; - const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - // If the item failed but there's no error spec or default spec to deserialize the error, - // we should fail so we just throw the parsed response - if (!errorResponseSpec) { - throw error; - } - const defaultBodyMapper = errorResponseSpec.bodyMapper; - const defaultHeadersMapper = errorResponseSpec.headersMapper; - try { - // If error response has a body, try to deserialize it using default body mapper. - // Then try to extract error code & message from it - if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let parsedError; - if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; - if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof parsedBody === "object" ? 
parsedBody[defaultBodyMapper.xmlElementName] : []; - } - parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); - } - const internalError = parsedBody.error || parsedError || parsedBody; - error.code = internalError.code; - if (internalError.message) { - error.message = internalError.message; - } - if (defaultBodyMapper) { - error.response.parsedBody = parsedError; - } - } - // If error response has headers, try to deserialize it using default header mapper - if (parsedResponse.headers && defaultHeadersMapper) { - error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders"); - } - } - catch (defaultError) { - error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; - } - return { error, shouldReturnResponse: false }; -} -function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { - var _a; - const errorHandler = (err) => { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || RestError.PARSE_ERROR; - const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); - return Promise.reject(e); - }; - const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || - operationResponse.request.streamResponseBody; - if (!streaming && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType - ? 
[] - : contentType.split(";").map((component) => component.toLowerCase()); - if (contentComponents.length === 0 || - contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - return new Promise((resolve) => { - operationResponse.parsedBody = JSON.parse(text); - resolve(operationResponse); - }).catch(errorHandler); - } - else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - return parseXML(text, opts) - .then((body) => { - operationResponse.parsedBody = body; - return operationResponse; - }) - .catch(errorHandler); - } - } - return Promise.resolve(operationResponse); -} -//# sourceMappingURL=deserializationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js deleted file mode 100644 index 3bbbec5b1..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/* - * NOTE: When moving this file, please update "browser" section in package.json - */ -import { BaseRequestPolicy, } from "./requestPolicy"; -const errorMessage = "DisableResponseDecompressionPolicy is not supported in browser environment"; -/** - * {@link DisableResponseDecompressionPolicy} is not supported in browser and attempting - * to use it will results in error being thrown. 
- */ -export function disableResponseDecompressionPolicy() { - return { - create: (_nextPolicy, _options) => { - throw new Error(errorMessage); - }, - }; -} -export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - throw new Error(errorMessage); - } - async sendRequest(_request) { - throw new Error(errorMessage); - } -} -//# sourceMappingURL=disableResponseDecompressionPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js deleted file mode 100644 index bf033e004..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/disableResponseDecompressionPolicy.js +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. - */ -export function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - }, - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -export class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. 
- * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} -//# sourceMappingURL=disableResponseDecompressionPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js deleted file mode 100644 index 0df2f6bde..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/exponentialRetryPolicy.js +++ /dev/null @@ -1,106 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; -import { Constants } from "../util/constants"; -import { RestError } from "../restError"; -import { delay } from "@azure/core-util"; -import { logger } from "../log"; -/** - * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. - * @param retryCount - Maximum number of retries. - * @param retryInterval - Base time between retries. - * @param maxRetryInterval - Maximum time to wait between retries. - */ -export function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - }, - }; -} -/** - * Describes the Retry Mode type. Currently supporting only Exponential. - */ -export var RetryMode; -(function (RetryMode) { - /** - * Currently supported retry mode. - * Each time a retry happens, it will take exponentially more time than the last time. 
- */ - RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; -})(RetryMode || (RetryMode = {})); -export const DefaultRetryOptions = { - maxRetries: DEFAULT_CLIENT_RETRY_COUNT, - retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, -}; -/** - * Instantiates a new "ExponentialRetryPolicyFilter" instance. - */ -export class ExponentialRetryPolicy extends BaseRequestPolicy { - /** - * @param nextPolicy - The next RequestPolicy in the pipeline chain. - * @param options - The options for this RequestPolicy. - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ - constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); - } -} -async function retry(policy, request, response, retryData, requestError) { - function shouldPolicyRetry(responseParam) { - const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; - if (statusCode === 503 && (response === null || response === void 0 ? 
void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { - return false; - } - if (statusCode === undefined || - (statusCode < 500 && statusCode !== 408) || - statusCode === 501 || - statusCode === 505) { - return false; - } - return true; - } - retryData = updateRetryData({ - retryInterval: policy.retryInterval, - minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval, - }, retryData, requestError); - const isAborted = request.abortSignal && request.abortSignal.aborted; - if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { - logger.info(`Retrying request in ${retryData.retryInterval}`); - try { - await delay(retryData.retryInterval); - const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); - } - catch (err) { - return retry(policy, request, response, retryData, err); - } - } - else if (isAborted || requestError || !response) { - // If the operation failed in the end, return all errors instead of just the last one - const err = retryData.error || - new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); - throw err; - } - else { - return response; - } -} -//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js deleted file mode 100644 index 868c1b71c..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/generateClientRequestIdPolicy.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -/** - * Creates a policy that assigns a unique request id to outgoing requests. 
- * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. - */ -export function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { - return { - create: (nextPolicy, options) => { - return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); - }, - }; -} -export class GenerateClientRequestIdPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _requestIdHeaderName) { - super(nextPolicy, options); - this._requestIdHeaderName = _requestIdHeaderName; - } - sendRequest(request) { - if (!request.headers.contains(this._requestIdHeaderName)) { - request.headers.set(this._requestIdHeaderName, request.requestId); - } - return this._nextPolicy.sendRequest(request); - } -} -//# sourceMappingURL=generateClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js deleted file mode 100644 index 22581800c..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/keepAlivePolicy.js +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -/** - * By default, HTTP connections are maintained for future requests. - */ -export const DefaultKeepAliveOptions = { - enable: true, -}; -/** - * Creates a policy that controls whether HTTP connections are maintained on future requests. - * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. 
- * @returns An instance of the {@link KeepAlivePolicy} - */ -export function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - }, - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -export class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} -//# sourceMappingURL=keepAlivePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js deleted file mode 100644 index 05d1225a4..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/logPolicy.js +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { Sanitizer } from "../util/sanitizer"; -import { logger as coreLogger } from "../log"; -/** - * Creates a policy that logs information about the outgoing request and the incoming responses. - * @param loggingOptions - Logging options. - * @returns An instance of the {@link LogPolicy} - */ -export function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - }, - }; -} -/** - * A policy that logs information about the outgoing request and the incoming responses. 
- */ -export class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger = coreLogger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { - super(nextPolicy, options); - this.logger = logger; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. 
- */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} -//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js deleted file mode 100644 index 36467994f..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.browser.js +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export function getDefaultUserAgentKey() { - return "x-ms-useragent"; -} -export function getPlatformSpecificData() { - const navigator = self.navigator; - const osInfo = { - key: "OS", - value: (navigator.oscpu || navigator.platform).replace(" ", ""), - }; - return [osInfo]; -} -//# sourceMappingURL=msRestUserAgentPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js deleted file mode 100644 index e1210a9ac..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.js +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import * as os from "os"; -import { Constants } from "../util/constants"; -export function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -export function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version, - }; - const osInfo = { - key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})`, - }; - return [runtimeInfo, osInfo]; -} -//# sourceMappingURL=msRestUserAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js b/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js deleted file mode 100644 index fe17cf341..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/msRestUserAgentPolicy.native.js +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const { Platform } = require("react-native"); // eslint-disable-line import/no-extraneous-dependencies, @typescript-eslint/no-require-imports -export function getDefaultUserAgentKey() { - return "x-ms-useragent"; -} -export function getPlatformSpecificData() { - const { major, minor, patch } = Platform.constants.reactNativeVersion; - const runtimeInfo = { - key: "react-native", - value: `${major}.${minor}.${patch}`, - }; - const osInfo = { - key: "OS", - value: `${Platform.OS}-${Platform.Version}`, - }; - return [runtimeInfo, osInfo]; -} -//# sourceMappingURL=msRestUserAgentPolicy.native.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js deleted file mode 100644 index 19119e555..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/ndJsonPolicy.js +++ /dev/null @@ -1,37 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// BaseRequestPolicy has a protected constructor. 
-/* eslint-disable @typescript-eslint/no-useless-constructor */ -import { BaseRequestPolicy, } from "./requestPolicy"; -export function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); - }, - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} -//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js deleted file mode 100644 index 84490b8f9..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.browser.js +++ /dev/null @@ -1,24 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import { BaseRequestPolicy, } from "./requestPolicy"; -const errorMessage = "ProxyPolicy is not supported in browser environment"; -export function getDefaultProxySettings(_proxyUrl) { - return undefined; -} -export function proxyPolicy(_proxySettings) { - return { - create: (_nextPolicy, _options) => { - throw new Error(errorMessage); - }, - }; -} -export class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - throw new Error(errorMessage); - } - sendRequest(_request) { - throw new Error(errorMessage); - } -} -//# sourceMappingURL=proxyPolicy.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js deleted file mode 100644 index 031230bc7..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/proxyPolicy.js +++ /dev/null @@ -1,150 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { Constants } from "../util/constants"; -import { URLBuilder } from "../url"; -import { getEnvironmentValue } from "../util/utils"; -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -export const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. */ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. 
- * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -export function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -/** - * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. - * @param proxyUrl - URL of the proxy - * @returns The default proxy settings, or undefined. - */ -export function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? 
parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password, - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -export function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - }, - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? 
auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth, - }; -} -export class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} -//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js deleted file mode 100644 index 2a8a0597f..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/redirectPolicy.js +++ /dev/null @@ -1,64 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { URLBuilder } from "../url"; -/** - * Methods that are allowed to follow redirects 301 and 302 - */ -const allowedRedirect = ["GET", "HEAD"]; -export const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20, -}; -/** - * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. - * @param maximumRetries - Maximum number of redirects to follow. 
- * @returns An instance of the {@link RedirectPolicy} - */ -export function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); - }, - }; -} -/** - * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. - */ -export class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { - super(nextPolicy, options); - this.maxRetries = maxRetries; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; - } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); - } - return Promise.resolve(response); -} -//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js deleted file mode 100644 index 639c271b2..000000000 --- 
a/node_modules/@azure/core-http/dist-esm/src/policies/requestPolicy.js +++ /dev/null @@ -1,70 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { HttpPipelineLogLevel } from "../httpPipelineLogLevel"; -/** - * The base class from which all request policies derive. - */ -export class BaseRequestPolicy { - /** - * The main method to implement that manipulates a request/response. - */ - constructor( - /** - * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. - */ - _nextPolicy, - /** - * The options that can be passed to a given request policy. - */ - _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - this._options.log(logLevel, message); - } -} -/** - * Optional properties that can be used when creating a RequestPolicy. - */ -export class RequestPolicyOptions { - constructor(_logger) { - this._logger = _logger; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. 
- */ - shouldLog(logLevel) { - return (!!this._logger && - logLevel !== HttpPipelineLogLevel.OFF && - logLevel <= this._logger.minimumLogLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meet the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - if (this._logger && this.shouldLog(logLevel)) { - this._logger.log(logLevel, message); - } - } -} -//# sourceMappingURL=requestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js deleted file mode 100644 index 9713bcca5..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/rpRegistrationPolicy.js +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import * as utils from "../util/utils"; -import { BaseRequestPolicy, } from "./requestPolicy"; -import { delay } from "@azure/core-util"; -export function rpRegistrationPolicy(retryTimeout = 30) { - return { - create: (nextPolicy, options) => { - return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - }, - }; -} -export class RPRegistrationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _retryTimeout = 30) { - super(nextPolicy, options); - this._retryTimeout = _retryTimeout; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => registerIfNeeded(this, request, response)); - } -} -function registerIfNeeded(policy, request, response) { - if (response.status === 409) { - const rpName = checkRPNotRegisteredError(response.bodyAsText); - if (rpName) { - const urlPrefix = extractSubscriptionUrl(request.url); - return (registerRP(policy, urlPrefix, rpName, request) - // Autoregistration of ${provider} failed for some reason. We will not return this error - // instead will return the initial response with 409 status code back to the user. - // do nothing here as we are returning the original response at the end of this method. - .catch(() => false) - .then((registrationStatus) => { - if (registrationStatus) { - // Retry the original request. We have to change the x-ms-client-request-id - // otherwise Azure endpoint will return the initial 409 (cached) response. - request.headers.set("x-ms-client-request-id", utils.generateUuid()); - return policy._nextPolicy.sendRequest(request.clone()); - } - return response; - })); - } - } - return Promise.resolve(response); -} -/** - * Reuses the headers of the original request and url (if specified). - * @param originalRequest - The original request - * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. - * @returns A new request object with desired headers. 
- */ -function getRequestEssentials(originalRequest, reuseUrlToo = false) { - const reqOptions = originalRequest.clone(); - if (reuseUrlToo) { - reqOptions.url = originalRequest.url; - } - // We have to change the x-ms-client-request-id otherwise Azure endpoint - // will return the initial 409 (cached) response. - reqOptions.headers.set("x-ms-client-request-id", utils.generateUuid()); - // Set content-type to application/json - reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); - return reqOptions; -} -/** - * Validates the error code and message associated with 409 response status code. If it matches to that of - * RP not registered then it returns the name of the RP else returns undefined. - * @param body - The response body received after making the original request. - * @returns The name of the RP if condition is satisfied else undefined. - */ -function checkRPNotRegisteredError(body) { - let result, responseBody; - if (body) { - try { - responseBody = JSON.parse(body); - } - catch (err) { - // do nothing; - } - if (responseBody && - responseBody.error && - responseBody.error.message && - responseBody.error.code && - responseBody.error.code === "MissingSubscriptionRegistration") { - const matchRes = responseBody.error.message.match(/.*'(.*)'/i); - if (matchRes) { - result = matchRes.pop(); - } - } - } - return result; -} -/** - * Extracts the first part of the URL, just after subscription: - * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param url - The original request url - * @returns The url prefix as explained above. - */ -function extractSubscriptionUrl(url) { - let result; - const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); - if (matchRes && matchRes[0]) { - result = matchRes[0]; - } - else { - throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); - } - return result; -} -/** - * Registers the given provider. 
- * @param policy - The RPRegistrationPolicy this function is being called against. - * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param provider - The provider name to be registered. - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - */ -async function registerRP(policy, urlPrefix, provider, originalRequest) { - const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; - const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.method = "POST"; - reqOptions.url = postUrl; - const response = await policy._nextPolicy.sendRequest(reqOptions); - if (response.status !== 200) { - throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); - } - return getRegistrationStatus(policy, getUrl, originalRequest); -} -/** - * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. - * Polling will happen till the registrationState property of the response body is "Registered". - * @param policy - The RPRegistrationPolicy this function is being called against. - * @param url - The request url for polling - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - * @returns True if RP Registration is successful. 
- */ -async function getRegistrationStatus(policy, url, originalRequest) { - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.url = url; - reqOptions.method = "GET"; - const res = await policy._nextPolicy.sendRequest(reqOptions); - const obj = res.parsedBody; - if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { - return true; - } - else { - await delay(policy._retryTimeout * 1000); - return getRegistrationStatus(policy, url, originalRequest); - } -} -//# sourceMappingURL=rpRegistrationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js deleted file mode 100644 index c298812cb..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/signingPolicy.js +++ /dev/null @@ -1,31 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -/** - * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. - * @param authenticationProvider - The authentication provider. - * @returns An instance of the {@link SigningPolicy}. - */ -export function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - }, - }; -} -/** - * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
- */ -export class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} -//# sourceMappingURL=signingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js deleted file mode 100644 index 3832f41d3..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/systemErrorRetryPolicy.js +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { DEFAULT_CLIENT_MAX_RETRY_INTERVAL, DEFAULT_CLIENT_MIN_RETRY_INTERVAL, DEFAULT_CLIENT_RETRY_COUNT, DEFAULT_CLIENT_RETRY_INTERVAL, isNumber, shouldRetry, updateRetryData, } from "../util/exponentialBackoffStrategy"; -import { delay } from "@azure/core-util"; -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - Maximum number of retries. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. 
- * @returns An instance of the {@link SystemErrorRetryPolicy} - */ -export function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - }, - }; -} -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ -export class SystemErrorRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.minRetryInterval = isNumber(minRetryInterval) - ? minRetryInterval - : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? 
maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .catch((error) => retry(this, request, error.response, error)); - } -} -async function retry(policy, request, operationResponse, err, retryData) { - retryData = updateRetryData(policy, retryData, err); - function shouldPolicyRetry(_response, error) { - if (error && - error.code && - (error.code === "ETIMEDOUT" || - error.code === "ESOCKETTIMEDOUT" || - error.code === "ECONNREFUSED" || - error.code === "ECONNRESET" || - error.code === "ENOENT")) { - return true; - } - return false; - } - if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { - // If previous operation ended with an error and the policy allows a retry, do that - try { - await delay(retryData.retryInterval); - return policy._nextPolicy.sendRequest(request.clone()); - } - catch (nestedErr) { - return retry(policy, request, operationResponse, nestedErr, retryData); - } - } - else { - if (err) { - // If the operation failed in the end, return all errors instead of just the last one - return Promise.reject(retryData.error); - } - return operationResponse; - } -} -//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js deleted file mode 100644 index c9fac19b0..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/throttlingRetryPolicy.js +++ /dev/null @@ -1,97 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import { BaseRequestPolicy, } from "./requestPolicy"; -import { AbortError } from "@azure/abort-controller"; -import { Constants } from "../util/constants"; -import { DEFAULT_CLIENT_MAX_RETRY_COUNT } from "../util/throttlingRetryStrategy"; -import { delay } from "@azure/core-util"; -const StatusCodes = Constants.HttpConstants.StatusCodes; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. - * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - * @returns - */ -export function throttlingRetryPolicy() { - return { - create: (nextPolicy, options) => { - return new ThrottlingRetryPolicy(nextPolicy, options); - }, - }; -} -const StandardAbortMessage = "The operation was aborted."; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
- * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - */ -export class ThrottlingRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _handleResponse) { - super(nextPolicy, options); - this.numberOfRetries = 0; - this._handleResponse = _handleResponse || this._defaultResponseHandler; - } - async sendRequest(httpRequest) { - const response = await this._nextPolicy.sendRequest(httpRequest.clone()); - if (response.status !== StatusCodes.TooManyRequests && - response.status !== StatusCodes.ServiceUnavailable) { - return response; - } - else { - return this._handleResponse(httpRequest, response); - } - } - async _defaultResponseHandler(httpRequest, httpResponse) { - var _a; - const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); - if (retryAfterHeader) { - const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); - if (delayInMs) { - this.numberOfRetries += 1; - await delay(delayInMs, { - abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage, - }); - if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) { - throw new AbortError(StandardAbortMessage); - } - if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { - return this.sendRequest(httpRequest); - } - else { - return this._nextPolicy.sendRequest(httpRequest); - } - } - } - return httpResponse; - } - static parseRetryAfterHeader(headerValue) { - const retryAfterInSeconds = Number(headerValue); - if (Number.isNaN(retryAfterInSeconds)) { - return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); - } - else { - return retryAfterInSeconds * 1000; - } - } - static parseDateRetryAfterHeader(headerValue) { - try { - const now = Date.now(); - const date = Date.parse(headerValue); - const diff = date - now; - return Number.isNaN(diff) ? undefined : diff; - } - catch (error) { - return undefined; - } - } -} -//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js b/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js deleted file mode 100644 index 2a1b6b3f6..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/tracingPolicy.js +++ /dev/null @@ -1,126 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { SpanKind, SpanStatusCode, createSpanFunction, getTraceParentHeader, isSpanContextValid, } from "@azure/core-tracing"; -import { logger } from "../log"; -const createSpan = createSpanFunction({ - packagePrefix: "", - namespace: "", -}); -/** - * Creates a policy that wraps outgoing requests with a tracing span. - * @param tracingOptions - Tracing options. - * @returns An instance of the {@link TracingPolicy} class. - */ -export function tracingPolicy(tracingOptions = {}) { - return { - create(nextPolicy, options) { - return new TracingPolicy(nextPolicy, options, tracingOptions); - }, - }; -} -/** - * A policy that wraps outgoing requests with a tracing span. 
- */ -export class TracingPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, tracingOptions) { - super(nextPolicy, options); - this.userAgent = tracingOptions.userAgent; - } - async sendRequest(request) { - if (!request.tracingContext) { - return this._nextPolicy.sendRequest(request); - } - const span = this.tryCreateSpan(request); - if (!span) { - return this._nextPolicy.sendRequest(request); - } - try { - const response = await this._nextPolicy.sendRequest(request); - this.tryProcessResponse(span, response); - return response; - } - catch (err) { - this.tryProcessError(span, err); - throw err; - } - } - tryCreateSpan(request) { - var _a; - try { - // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. - // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(`HTTP ${request.method}`, { - tracingOptions: { - spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: SpanKind.CLIENT }), - tracingContext: request.tracingContext, - }, - }); - // If the span is not recording, don't do any more work. - if (!span.isRecording()) { - span.end(); - return undefined; - } - const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? 
void 0 : _a.getValue(Symbol.for("az.namespace")); - if (typeof namespaceFromContext === "string") { - span.setAttribute("az.namespace", namespaceFromContext); - } - span.setAttributes({ - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId, - }); - if (this.userAgent) { - span.setAttribute("http.user_agent", this.userAgent); - } - // set headers - const spanContext = span.spanContext(); - const traceParentHeader = getTraceParentHeader(spanContext); - if (traceParentHeader && isSpanContextValid(spanContext)) { - request.headers.set("traceparent", traceParentHeader); - const traceState = spanContext.traceState && spanContext.traceState.serialize(); - // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent - if (traceState) { - request.headers.set("tracestate", traceState); - } - } - return span; - } - catch (error) { - logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); - return undefined; - } - } - tryProcessError(span, err) { - try { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: err.message, - }); - if (err.statusCode) { - span.setAttribute("http.status_code", err.statusCode); - } - span.end(); - } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); - } - } - tryProcessResponse(span, response) { - try { - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - span.setStatus({ - code: SpanStatusCode.OK, - }); - span.end(); - } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); - } - } -} -//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js 
b/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js deleted file mode 100644 index 527856c20..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/policies/userAgentPolicy.js +++ /dev/null @@ -1,78 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, } from "./requestPolicy"; -import { getDefaultUserAgentKey, getPlatformSpecificData } from "./msRestUserAgentPolicy"; -import { Constants } from "../util/constants"; -import { HttpHeaders } from "../httpHeaders"; -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion, - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -export const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -/** - * The default approach to generate user agents. - * Uses static information from this package, plus system information available from the runtime. - */ -export function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -/** - * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - * @param userAgentData - Telemetry information. - * @returns A new {@link UserAgentPolicy}. - */ -export function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? 
getDefaultUserAgentValue() - : userAgentData.value; - return { - create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - }, - }; -} -/** - * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - */ -export class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; - } - sendRequest(request) { - this.addUserAgentHeader(request); - return this._nextPolicy.sendRequest(request); - } - /** - * Adds the user agent header to the outgoing request. - */ - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } -} -//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js b/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js deleted file mode 100644 index 47469975e..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/proxyAgent.js +++ /dev/null @@ -1,57 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import * as tunnel from "tunnel"; -import { URLBuilder } from "./url"; -export function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {}, - }, - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; - } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), - }; - return proxyAgent; -} -export function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; -} -export function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel.httpOverHttps(tunnelOptions); - } - else { - return tunnel.httpOverHttp(tunnelOptions); - } -} -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} -//# sourceMappingURL=proxyAgent.js.map \ No newline at end of file diff --git 
a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js b/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js deleted file mode 100644 index ff08c3dc3..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/queryCollectionFormat.js +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The format that will be used to join an array of values together for a query parameter value. - */ -export var QueryCollectionFormat; -(function (QueryCollectionFormat) { - /** - * CSV: Each pair of segments joined by a single comma. - */ - QueryCollectionFormat["Csv"] = ","; - /** - * SSV: Each pair of segments joined by a single space character. - */ - QueryCollectionFormat["Ssv"] = " "; - /** - * TSV: Each pair of segments joined by a single tab character. - */ - QueryCollectionFormat["Tsv"] = "\t"; - /** - * Pipes: Each pair of segments joined by a single pipe character. - */ - QueryCollectionFormat["Pipes"] = "|"; - /** - * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` - */ - QueryCollectionFormat["Multi"] = "Multi"; -})(QueryCollectionFormat || (QueryCollectionFormat = {})); -//# sourceMappingURL=queryCollectionFormat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/restError.js b/node_modules/@azure/core-http/dist-esm/src/restError.js deleted file mode 100644 index 88d4a7803..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/restError.js +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { Sanitizer } from "./util/sanitizer"; -import { custom } from "./util/inspect"; -const errorSanitizer = new Sanitizer(); -/** - * An error resulting from an HTTP request to a service endpoint. 
- */ -export class RestError extends Error { - constructor(message, code, statusCode, request, response) { - super(message); - this.name = "RestError"; - this.code = code; - this.statusCode = statusCode; - this.request = request; - this.response = response; - Object.setPrototypeOf(this, RestError.prototype); - } - /** - * Logging method for util.inspect in Node - */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; - } -} -/** - * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) - */ -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; -/** - * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. - */ -RestError.PARSE_ERROR = "PARSE_ERROR"; -//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serializer.js b/node_modules/@azure/core-http/dist-esm/src/serializer.js deleted file mode 100644 index 434c50a0e..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/serializer.js +++ /dev/null @@ -1,918 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/* eslint-disable eqeqeq */ -import * as base64 from "./util/base64"; -import * as utils from "./util/utils"; -import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; -// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. -/** - * Used to map raw response objects to final shapes. - * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. - * Also allows pulling values from headers, as well as inserting default values and constants. 
- */ -export class Serializer { - constructor( - /** - * The provided model mapper. - */ - modelMappers = {}, - /** - * Whether the contents are XML or not. - */ - isXML) { - this.modelMappers = modelMappers; - this.isXML = isXML; - } - /** - * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. - * @param mapper - The definition of data models. - * @param value - The value. - * @param objectName - Name of the object. Used in the error messages. - * @deprecated Removing the constraints validation on client side. - */ - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); - }; - if (mapper.constraints && value != undefined) { - const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; - if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { - failValidation("ExclusiveMaximum", ExclusiveMaximum); - } - if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { - failValidation("ExclusiveMinimum", ExclusiveMinimum); - } - if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { - failValidation("InclusiveMaximum", InclusiveMaximum); - } - if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { - failValidation("InclusiveMinimum", InclusiveMinimum); - } - const valueAsArray = value; - if (MaxItems != undefined && valueAsArray.length > MaxItems) { - failValidation("MaxItems", MaxItems); - } - if (MaxLength != undefined && valueAsArray.length > MaxLength) { - failValidation("MaxLength", MaxLength); - } - if (MinItems != undefined && valueAsArray.length < MinItems) { - failValidation("MinItems", MinItems); - 
} - if (MinLength != undefined && valueAsArray.length < MinLength) { - failValidation("MinLength", MinLength); - } - if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { - failValidation("MultipleOf", MultipleOf); - } - if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; - if (typeof value !== "string" || value.match(pattern) === null) { - failValidation("Pattern", Pattern); - } - } - if (UniqueItems && - valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { - failValidation("UniqueItems", UniqueItems); - } - } - } - /** - * Serialize the given object based on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param object - A valid Javascript object to be serialized. - * @param objectName - Name of the serialized object. - * @param options - additional options to deserialization. - * @returns A valid serialized Javascript object. - */ - serialize(mapper, object, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - let payload = {}; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Sequence$/i) !== null) { - payload = []; - } - if (mapper.isConstant) { - object = mapper.defaultValue; - } - // This table of allowed values should help explain - // the mapper.required and mapper.nullable properties. - // X means "neither undefined or null are allowed". 
- // || required - // || true | false - // nullable || ========================== - // true || null | undefined/null - // false || X | undefined - // undefined || X | undefined/null - const { required, nullable } = mapper; - if (required && nullable && object === undefined) { - throw new Error(`${objectName} cannot be undefined.`); - } - if (required && !nullable && object == undefined) { - throw new Error(`${objectName} cannot be null or undefined.`); - } - if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); - } - if (object == undefined) { - payload = object; - } - else { - if (mapperType.match(/^any$/i) !== null) { - payload = object; - } - else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { - payload = serializeBasicTypes(mapperType, objectName, object); - } - else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; - payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); - } - else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { - payload = serializeDateTypes(mapperType, object, objectName); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = serializeByteArrayType(objectName, object); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = serializeBase64UrlType(objectName, object); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - } - return payload; - } - /** - * Deserialize the given object based 
on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param responseBody - A valid Javascript entity to be deserialized. - * @param objectName - Name of the deserialized object. - * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object. - */ - deserialize(mapper, responseBody, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - if (responseBody == undefined) { - if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { - // Edge case for empty XML non-wrapped lists. xml2js can't distinguish - // between the list being empty versus being missing, - // so let's do the more user-friendly thing and return an empty list. - responseBody = []; - } - // specifically check for undefined as default value can be a falsey value `0, "", false, null` - if (mapper.defaultValue !== undefined) { - responseBody = mapper.defaultValue; - } - return responseBody; - } - let payload; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Composite$/i) !== null) { - payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); - } - else { - if (this.isXML) { - const xmlCharKey = updatedOptions.xmlCharKey; - const castResponseBody = responseBody; - /** - * If the mapper specifies this as a non-composite type value but the responseBody contains - * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, - * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. 
- */ - if (castResponseBody[XML_ATTRKEY] != undefined && - castResponseBody[xmlCharKey] != undefined) { - responseBody = castResponseBody[xmlCharKey]; - } - } - if (mapperType.match(/^Number$/i) !== null) { - payload = parseFloat(responseBody); - if (isNaN(payload)) { - payload = responseBody; - } - } - else if (mapperType.match(/^Boolean$/i) !== null) { - if (responseBody === "true") { - payload = true; - } - else if (responseBody === "false") { - payload = false; - } - else { - payload = responseBody; - } - } - else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { - payload = responseBody; - } - else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { - payload = new Date(responseBody); - } - else if (mapperType.match(/^UnixTime$/i) !== null) { - payload = unixTimeToDate(responseBody); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = base64.decodeString(responseBody); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = base64UrlToByteArray(responseBody); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); - } - } - if (mapper.isConstant) { - payload = mapper.defaultValue; - } - return payload; - } -} -function trimEnd(str, ch) { - let len = str.length; - while (len - 1 >= 0 && str[len - 1] === ch) { - --len; - } - return str.substr(0, len); -} -function bufferToBase64Url(buffer) { - if (!buffer) { - return undefined; - } - if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); - } - // Uint8Array to Base64. - const str = base64.encodeByteArray(buffer); - // Base64 to Base64Url. 
- return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); -} -function base64UrlToByteArray(str) { - if (!str) { - return undefined; - } - if (str && typeof str.valueOf() !== "string") { - throw new Error("Please provide an input of type string for converting to Uint8Array"); - } - // Base64Url to Base64. - str = str.replace(/-/g, "+").replace(/_/g, "/"); - // Base64 to Uint8Array. - return base64.decodeString(str); -} -function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; - if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { - if (item.charAt(item.length - 1) === "\\") { - partialclass += item.substr(0, item.length - 1) + "."; - } - else { - partialclass += item; - classes.push(partialclass); - partialclass = ""; - } - } - } - return classes; -} -function dateToUnixTime(d) { - if (!d) { - return undefined; - } - if (typeof d.valueOf() === "string") { - d = new Date(d); - } - return Math.floor(d.getTime() / 1000); -} -function unixTimeToDate(n) { - if (!n) { - return undefined; - } - return new Date(n * 1000); -} -function serializeBasicTypes(typeName, objectName, value) { - if (value !== null && value !== undefined) { - if (typeName.match(/^Number$/i) !== null) { - if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); - } - } - else if (typeName.match(/^String$/i) !== null) { - if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); - } - } - else if (typeName.match(/^Uuid$/i) !== null) { - if (!(typeof value.valueOf() === "string" && utils.isValidUuid(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); - } - } - else if (typeName.match(/^Boolean$/i) !== null) { - if (typeof value !== "boolean") { - throw new Error(`${objectName} with value ${value} must be of type boolean.`); - } - } - else if 
(typeName.match(/^Stream$/i) !== null) { - const objectType = typeof value; - if (objectType !== "string" && - objectType !== "function" && - !(value instanceof ArrayBuffer) && - !ArrayBuffer.isView(value) && - !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { - throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); - } - } - } - return value; -} -function serializeEnumType(objectName, allowedValues, value) { - if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); - } - const isPresent = allowedValues.some((item) => { - if (typeof item.valueOf() === "string") { - return item.toLowerCase() === value.toLowerCase(); - } - return item === value; - }); - if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); - } - return value; -} -function serializeByteArrayType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - returnValue = base64.encodeByteArray(value); - } - return returnValue; -} -function serializeBase64UrlType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - returnValue = bufferToBase64Url(value) || ""; - } - return returnValue; -} -function serializeDateTypes(typeName, value, objectName) { - if (value != undefined) { - if (typeName.match(/^Date$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = - value instanceof Date - ? 
value.toISOString().substring(0, 10) - : new Date(value).toISOString().substring(0, 10); - } - else if (typeName.match(/^DateTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); - } - else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); - } - value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); - } - else if (typeName.match(/^UnixTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + - `for it to be serialized in UnixTime/Epoch format.`); - } - value = dateToUnixTime(value); - } - else if (typeName.match(/^TimeSpan$/i) !== null) { - if (!utils.isDuration(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".`); - } - } - } - return value; -} -function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { - if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); - } - const elementType = mapper.type.element; - if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); - } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); - if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix - ? `xmlns:${elementType.xmlNamespacePrefix}` - : "xmlns"; - if (elementType.type.name === "Composite") { - tempArray[i] = Object.assign({}, serializedValue); - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } - else { - tempArray[i] = {}; - tempArray[i][options.xmlCharKey] = serializedValue; - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } - } - else { - tempArray[i] = serializedValue; - } - } - return tempArray; -} -function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { - if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); - } - const valueType = mapper.type.value; - if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); - } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); - // If the element needs an XML namespace we need to add it within the $ property - tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); - } - // Add the 
namespace to the root element if needed - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; - return result; - } - return tempDictionary; -} -/** - * Resolves the additionalProperties property from a referenced mapper. - * @param serializer - The serializer containing the entire set of mappers. - * @param mapper - The composite mapper to resolve. - * @param objectName - Name of the object being serialized. - */ -function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; - if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; - } - return additionalProperties; -} -/** - * Finds the mapper referenced by `className`. - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized - */ -function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; - if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); - } - return serializer.modelMappers[className]; -} -/** - * Resolves a composite mapper's modelProperties. 
- * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - */ -function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; - if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); - } - modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; - if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the ` + - `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); - } - } - return modelProps; -} -function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); - } - if (object != undefined) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - if (propertyMapper.readOnly) { - continue; - } - let propName; - let parentObject = payload; - if (serializer.isXML) { - if (propertyMapper.xmlIsWrapped) { - propName = propertyMapper.xmlName; - } - else { - propName = propertyMapper.xmlElementName || propertyMapper.xmlName; - } - } - else { - const paths = splitSerializeName(propertyMapper.serializedName); - propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; - if (childObject == undefined && - (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { - parentObject[pathName] = {}; - } - parentObject = parentObject[pathName]; - } - } - if (parentObject != undefined) { - if (isXml && 
mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix - ? `xmlns:${mapper.xmlNamespacePrefix}` - : "xmlns"; - parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); - } - const propertyObjectName = propertyMapper.serializedName !== "" - ? objectName + "." + propertyMapper.serializedName - : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator && - polymorphicDiscriminator.clientName === key && - toSerialize == undefined) { - toSerialize = mapper.serializedName; - } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); - if (serializedValue !== undefined && propName != undefined) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); - if (isXml && propertyMapper.xmlIsAttribute) { - // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. - // This keeps things simple while preventing name collision - // with names in user documents. 
- parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; - parentObject[XML_ATTRKEY][propName] = serializedValue; - } - else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; - } - else { - parentObject[propName] = value; - } - } - } - } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); - if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); - if (isAdditionalProperty) { - payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); - } - } - } - return payload; - } - return object; -} -function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { - if (!isXml || !propertyMapper.xmlNamespace) { - return serializedValue; - } - const xmlnsKey = propertyMapper.xmlNamespacePrefix - ? `xmlns:${propertyMapper.xmlNamespacePrefix}` - : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; - if (["Composite"].includes(propertyMapper.type.name)) { - if (serializedValue[XML_ATTRKEY]) { - return serializedValue; - } - else { - const result = Object.assign({}, serializedValue); - result[XML_ATTRKEY] = xmlNamespace; - return result; - } - } - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = xmlNamespace; - return result; -} -function isSpecialXmlProperty(propertyName, options) { - return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); -} -function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { - var _a, _b; - const xmlCharKey = (_a = options.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); - } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); - handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; - if (serializedName !== "" && serializedName !== undefined) { - propertyObjectName = objectName + "." + serializedName; - } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; - if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { - if (headerKey.startsWith(headerCollectionPrefix)) { - dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); - } - handledPropertyNames.push(headerKey); - } - instance[key] = dictionary; - } - else if (serializer.isXML) { - if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { - instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); - } - else if (propertyMapper.xmlIsMsText) { - if (responseBody[xmlCharKey] !== undefined) { - instance[key] = responseBody[xmlCharKey]; - } - else if (typeof responseBody === "string") { - // The special case where xml parser parses "content" into JSON of - // `{ name: "content"}` instead of `{ name: { "_": "content" }}` - instance[key] = responseBody; - } - } - else { - const propertyName = xmlElementName || xmlName || serializedName; - if (propertyMapper.xmlIsWrapped) { - /* a list of wrapped by - For the xml example below - - ... - ... 
- - the responseBody has - { - Cors: { - CorsRule: [{...}, {...}] - } - } - xmlName is "Cors" and xmlElementName is"CorsRule". - */ - const wrapped = responseBody[xmlName]; - const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; - instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); - handledPropertyNames.push(xmlName); - } - else { - const property = responseBody[propertyName]; - instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); - handledPropertyNames.push(propertyName); - } - } - } - else { - // deserialize the property if it is present in the provided responseBody instance - let propertyInstance; - let res = responseBody; - // traversing the object step by step. - for (const item of paths) { - if (!res) - break; - res = res[item]; - } - propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; - // checking that the model property name (key)(ex: "fishtype") and the - // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") - // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") - // is a better approach. The generator is not consistent with escaping '\.' in the - // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator - // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, - // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and - // the transformation of model property name (ex: "fishtype") is done consistently. - // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
- if (polymorphicDiscriminator && - key === polymorphicDiscriminator.clientName && - propertyInstance == undefined) { - propertyInstance = mapper.serializedName; - } - let serializedValue; - // paging - if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { - propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - // Copy over any properties that have already been added into the instance, where they do - // not exist on the newly de-serialized array - for (const [k, v] of Object.entries(instance)) { - if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { - arrayInstance[k] = v; - } - } - instance = arrayInstance; - } - else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { - serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - instance[key] = serializedValue; - } - } - } - const additionalPropertiesMapper = mapper.type.additionalProperties; - if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); - if (paths[0] === responsePropName) { - return false; - } - } - return true; - }; - for (const responsePropName in responseBody) { - if (isAdditionalProperty(responsePropName)) { - instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); - } - } - } - else if (responseBody) { - for (const key of Object.keys(responseBody)) { - if (instance[key] === undefined && - !handledPropertyNames.includes(key) && - !isSpecialXmlProperty(key, options)) { - instance[key] = responseBody[key]; - } - } - } - return instance; -} -function deserializeDictionaryType(serializer, mapper, responseBody, objectName, 
options) { - const value = mapper.type.value; - if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { - tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); - } - return tempDictionary; - } - return responseBody; -} -function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - const element = mapper.type.element; - if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - if (!Array.isArray(responseBody)) { - // xml2js will interpret a single element array as just the element, so force it to be an array - responseBody = [responseBody]; - } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); - } - return tempArray; - } - return responseBody; -} -function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator) { - const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; - if (discriminatorName != undefined) { - const discriminatorValue = object[discriminatorName]; - if (discriminatorValue != undefined) { - const typeName = mapper.type.uberParent || mapper.type.className; - const indexDiscriminator = discriminatorValue === typeName - ? discriminatorValue - : typeName + "." 
+ discriminatorValue; - const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; - if (polymorphicMapper) { - mapper = polymorphicMapper; - } - } - } - } - return mapper; -} -function getPolymorphicDiscriminatorRecursively(serializer, mapper) { - return (mapper.type.polymorphicDiscriminator || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); -} -function getPolymorphicDiscriminatorSafely(serializer, typeName) { - return (typeName && - serializer.modelMappers[typeName] && - serializer.modelMappers[typeName].type.polymorphicDiscriminator); -} -/** - * Utility function that serializes an object that might contain binary information into a plain object, array or a string. - */ -export function serializeObject(toSerialize) { - const castToSerialize = toSerialize; - if (toSerialize == undefined) - return undefined; - if (toSerialize instanceof Uint8Array) { - toSerialize = base64.encodeByteArray(toSerialize); - return toSerialize; - } - else if (toSerialize instanceof Date) { - return toSerialize.toISOString(); - } - else if (Array.isArray(toSerialize)) { - const array = []; - for (let i = 0; i < toSerialize.length; i++) { - array.push(serializeObject(toSerialize[i])); - } - return array; - } - else if (typeof toSerialize === "object") { - const dictionary = {}; - for (const property in toSerialize) { - dictionary[property] = serializeObject(castToSerialize[property]); - } - return dictionary; - } - return toSerialize; -} -/** - * Utility function to create a K:V from a list of strings - */ -function strEnum(o) { - const result = {}; - for (const key of o) { - result[key] = key; - } - return result; -} -/** - * String enum containing the string types of property mappers. 
- */ -// eslint-disable-next-line @typescript-eslint/no-redeclare -export const MapperType = strEnum([ - "Base64Url", - "Boolean", - "ByteArray", - "Composite", - "Date", - "DateTime", - "DateTimeRfc1123", - "Dictionary", - "Enum", - "Number", - "Object", - "Sequence", - "String", - "Stream", - "TimeSpan", - "UnixTime", -]); -//# sourceMappingURL=serializer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js b/node_modules/@azure/core-http/dist-esm/src/serviceClient.js deleted file mode 100644 index 13e507454..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/serviceClient.js +++ /dev/null @@ -1,619 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import * as utils from "./util/utils"; -import { MapperType } from "./serializer"; -import { DefaultDeserializationOptions, deserializationPolicy, } from "./policies/deserializationPolicy"; -import { DefaultKeepAliveOptions, keepAlivePolicy } from "./policies/keepAlivePolicy"; -import { DefaultRedirectOptions, redirectPolicy } from "./policies/redirectPolicy"; -import { DefaultRetryOptions, exponentialRetryPolicy } from "./policies/exponentialRetryPolicy"; -import { logPolicy } from "./policies/logPolicy"; -import { getPathStringFromParameter, getPathStringFromParameterPath, } from "./operationParameter"; -import { getStreamResponseStatusCodes } from "./operationSpec"; -import { WebResource, isWebResourceLike, } from "./webResource"; -import { RequestPolicyOptions, } from "./policies/requestPolicy"; -import { XML_ATTRKEY, XML_CHARKEY } from "./util/serializer.common"; -import { isNode } from "@azure/core-util"; -import { isTokenCredential } from "@azure/core-auth"; -import { getDefaultUserAgentHeaderName, getDefaultUserAgentValue, userAgentPolicy, } from "./policies/userAgentPolicy"; -import { QueryCollectionFormat } from "./queryCollectionFormat"; -import { URLBuilder } from "./url"; -import { 
bearerTokenAuthenticationPolicy } from "./policies/bearerTokenAuthenticationPolicy"; -import { disableResponseDecompressionPolicy } from "./policies/disableResponseDecompressionPolicy"; -import { generateClientRequestIdPolicy } from "./policies/generateClientRequestIdPolicy"; -import { getCachedDefaultHttpClient } from "./httpClientCache"; -import { logger } from "./log"; -import { ndJsonPolicy } from "./policies/ndJsonPolicy"; -import { proxyPolicy } from "./policies/proxyPolicy"; -import { rpRegistrationPolicy } from "./policies/rpRegistrationPolicy"; -import { signingPolicy } from "./policies/signingPolicy"; -import { stringifyXML } from "./util/xml"; -import { systemErrorRetryPolicy } from "./policies/systemErrorRetryPolicy"; -import { throttlingRetryPolicy } from "./policies/throttlingRetryPolicy"; -import { tracingPolicy } from "./policies/tracingPolicy"; -/** - * ServiceClient sends service requests and receives responses. - */ -export class ServiceClient { - /** - * The ServiceClient constructor - * @param credentials - The credentials used for authentication with the service. - * @param options - The service client options that govern the behavior of the client. 
- */ - constructor(credentials, - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ - options) { - if (!options) { - options = {}; - } - this._withCredentials = options.withCredentials || false; - this._httpClient = options.httpClient || getCachedDefaultHttpClient(); - this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); - let requestPolicyFactories; - if (Array.isArray(options.requestPolicyFactories)) { - logger.info("ServiceClient: using custom request policies"); - requestPolicyFactories = options.requestPolicyFactories; - } - else { - let authPolicyFactory = undefined; - if (isTokenCredential(credentials)) { - logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); - // Create a wrapped RequestPolicyFactory here so that we can provide the - // correct scope to the BearerTokenAuthenticationPolicy at the first time - // one is requested. This is needed because generated ServiceClient - // implementations do not set baseUri until after ServiceClient's constructor - // is finished, leaving baseUri empty at the time when it is needed to - // build the correct scope name. - const wrappedPolicyFactory = () => { - let bearerTokenPolicyFactory = undefined; - // eslint-disable-next-line @typescript-eslint/no-this-alias - const serviceClient = this; - const serviceClientOptions = options; - return { - create(nextPolicy, createOptions) { - const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); - if (!credentialScopes) { - throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. 
Unable to create a bearerTokenAuthenticationPolicy`); - } - if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { - bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); - } - return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - }, - }; - }; - authPolicyFactory = wrappedPolicyFactory(); - } - else if (credentials && typeof credentials.signRequest === "function") { - logger.info("ServiceClient: creating signing policy from provided credentials"); - authPolicyFactory = signingPolicy(credentials); - } - else if (credentials !== undefined && credentials !== null) { - throw new Error("The credentials argument must implement the TokenCredential interface"); - } - logger.info("ServiceClient: using default request policies"); - requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); - if (options.requestPolicyFactories) { - // options.requestPolicyFactories can also be a function that manipulates - // the default requestPolicyFactories array - const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); - if (newRequestPolicyFactories) { - requestPolicyFactories = newRequestPolicyFactories; - } - } - } - this._requestPolicyFactories = requestPolicyFactories; - } - /** - * Send the provided httpRequest. 
- */ - sendRequest(options) { - if (options === null || options === undefined || typeof options !== "object") { - throw new Error("options cannot be null or undefined and it must be of type object."); - } - let httpRequest; - try { - if (isWebResourceLike(options)) { - options.validateRequestProperties(); - httpRequest = options; - } - else { - httpRequest = new WebResource(); - httpRequest = httpRequest.prepare(options); - } - } - catch (error) { - return Promise.reject(error); - } - let httpPipeline = this._httpClient; - if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { - for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { - httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); - } - } - return httpPipeline.sendRequest(httpRequest); - } - /** - * Send an HTTP request that is populated using the provided OperationSpec. - * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. - * @param operationSpec - The OperationSpec to use to populate the httpRequest. - * @param callback - The callback to call when the response is received. - */ - async sendOperationRequest(operationArguments, operationSpec, callback) { - var _a; - if (typeof operationArguments.options === "function") { - callback = operationArguments.options; - operationArguments.options = undefined; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - const httpRequest = new WebResource(); - let result; - try { - const baseUri = operationSpec.baseUrl || this.baseUri; - if (!baseUri) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); - } - httpRequest.method = operationSpec.httpMethod; - httpRequest.operationSpec = operationSpec; - const requestUrl = URLBuilder.parse(baseUri); - if (operationSpec.path) { - requestUrl.appendPath(operationSpec.path); - } - if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); - } - } - if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { - for (const queryParameter of operationSpec.queryParameters) { - let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); - if (queryParameterValue !== undefined && queryParameterValue !== null) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null) { - if (queryParameter.collectionFormat === QueryCollectionFormat.Multi) { - if (queryParameterValue.length === 0) { - // The collection is empty, no need 
to try serializing the current queryParam - continue; - } - else { - for (const index in queryParameterValue) { - const item = queryParameterValue[index]; - queryParameterValue[index] = - item === undefined || item === null ? "" : item.toString(); - } - } - } - else if (queryParameter.collectionFormat === QueryCollectionFormat.Ssv || - queryParameter.collectionFormat === QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - for (const index in queryParameterValue) { - if (queryParameterValue[index] !== undefined && - queryParameterValue[index] !== null) { - queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); - } - } - } - else { - queryParameterValue = encodeURIComponent(queryParameterValue); - } - } - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null && - queryParameter.collectionFormat !== QueryCollectionFormat.Multi && - queryParameter.collectionFormat !== QueryCollectionFormat.Ssv && - queryParameter.collectionFormat !== QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); - } - } - } - httpRequest.url = requestUrl.toString(); - const contentType = operationSpec.contentType || this.requestContentType; - if (contentType && operationSpec.requestBody) { - httpRequest.headers.set("Content-Type", contentType); - } - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); - if (headerValue !== undefined && headerValue !== null) { - headerValue = 
operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); - const headerCollectionPrefix = headerParameter.mapper - .headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); - } - } - else { - httpRequest.headers.set(headerParameter.mapper.serializedName || - getPathStringFromParameter(headerParameter), headerValue); - } - } - } - } - const options = operationArguments.options; - if (options) { - if (options.customHeaders) { - for (const customHeaderName in options.customHeaders) { - httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); - } - } - if (options.abortSignal) { - httpRequest.abortSignal = options.abortSignal; - } - if (options.timeout) { - httpRequest.timeout = options.timeout; - } - if (options.onUploadProgress) { - httpRequest.onUploadProgress = options.onUploadProgress; - } - if (options.onDownloadProgress) { - httpRequest.onDownloadProgress = options.onDownloadProgress; - } - if (options.spanOptions) { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
- httpRequest.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - httpRequest.tracingContext = options.tracingContext; - } - if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { - httpRequest.shouldDeserialize = options.shouldDeserialize; - } - } - httpRequest.withCredentials = this._withCredentials; - serializeRequestBody(this, httpRequest, operationArguments, operationSpec); - if (httpRequest.streamResponseStatusCodes === undefined) { - httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); - } - let rawResponse; - let sendRequestError; - try { - rawResponse = await this.sendRequest(httpRequest); - } - catch (error) { - sendRequestError = error; - } - if (sendRequestError) { - if (sendRequestError.response) { - sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || - operationSpec.responses["default"]); - } - result = Promise.reject(sendRequestError); - } - else { - result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); - } - } - catch (error) { - result = Promise.reject(error); - } - const cb = callback; - if (cb) { - result - .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) - .catch((err) => cb(err)); - } - return result; - } -} -export function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { - var _a, _b, _c, _d, _e, _f; - const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; - const updatedOptions = { - rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", - includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? 
_e : XML_CHARKEY, - }; - const xmlCharKey = serializerOptions.xmlCharKey; - if (operationSpec.requestBody && operationSpec.requestBody.mapper) { - httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; - const typeName = bodyMapper.type.name; - try { - if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { - const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); - httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === MapperType.Stream; - if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); - if (typeName === MapperType.Sequence) { - httpRequest.body = stringifyXML(utils.prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - else if (!isStream) { - httpRequest.body = stringifyXML(value, { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - } - else if (typeName === MapperType.String && - (((_f = operationSpec.contentType) === null || _f === void 0 ? void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { - // the String serializer has validated that request body is a string - // so just send the string. 
- return; - } - else if (!isStream) { - httpRequest.body = JSON.stringify(httpRequest.body); - } - } - } - catch (error) { - throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); - } - } - else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { - httpRequest.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); - if (formDataParameterValue !== undefined && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); - httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); - } - } - } -} -/** - * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself - */ -function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { - // Composite and Sequence schemas already got their root namespace set during serialization - // We just need to add xmlns to the other schema types - if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; - return result; - } - return serializedValue; -} -function getValueOrFunctionResult(value, defaultValueCreator) { - let result; - if (typeof value === "string") { - result = value; - } - else { - result = defaultValueCreator(); - if (typeof value === "function") { - result = value(result); - } - } - return result; -} -function 
createDefaultRequestPolicyFactories(authPolicyFactory, options) { - const factories = []; - if (options.generateClientRequestIdHeader) { - factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); - } - if (authPolicyFactory) { - factories.push(authPolicyFactory); - } - const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); - const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); - if (userAgentHeaderName && userAgentHeaderValue) { - factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); - } - factories.push(redirectPolicy()); - factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); - if (!options.noRetryPolicy) { - factories.push(exponentialRetryPolicy()); - factories.push(systemErrorRetryPolicy()); - factories.push(throttlingRetryPolicy()); - } - factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (isNode) { - factories.push(proxyPolicy(options.proxySettings)); - } - factories.push(logPolicy({ logger: logger.info })); - return factories; -} -/** - * Creates an HTTP pipeline based on the given options. - * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. - * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. - * @returns A set of options that can be passed to create a new {@link ServiceClient}. 
- */ -export function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { - const requestPolicyFactories = []; - if (pipelineOptions.sendStreamingJson) { - requestPolicyFactories.push(ndJsonPolicy()); - } - let userAgentValue = undefined; - if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { - const userAgentInfo = []; - userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); - // Add the default user agent value if it isn't already specified - // by the userAgentPrefix option. - const defaultUserAgentInfo = getDefaultUserAgentValue(); - if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { - userAgentInfo.push(defaultUserAgentInfo); - } - userAgentValue = userAgentInfo.join(" "); - } - const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); - const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); - const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (isNode) { - requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); - } - const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); - const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); - requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); - if (redirectOptions.handleRedirects) { - requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); - } - if (authPolicyFactory) { - 
requestPolicyFactories.push(authPolicyFactory); - } - requestPolicyFactories.push(logPolicy(loggingOptions)); - if (isNode && pipelineOptions.decompressResponse === false) { - requestPolicyFactories.push(disableResponseDecompressionPolicy()); - } - return { - httpClient: pipelineOptions.httpClient, - requestPolicyFactories, - }; -} -/** - * Get the property parent for the property at the provided path when starting with the provided - * parent object. - */ -export function getPropertyParent(parent, propertyPath) { - if (parent && propertyPath) { - const propertyPathLength = propertyPath.length; - for (let i = 0; i < propertyPathLength - 1; ++i) { - const propertyName = propertyPath[i]; - if (!parent[propertyName]) { - parent[propertyName] = {}; - } - parent = parent[propertyName]; - } - } - return parent; -} -function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { - return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); -} -export function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { - var _a; - let value; - if (typeof parameterPath === "string") { - parameterPath = [parameterPath]; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - if (Array.isArray(parameterPath)) { - if (parameterPath.length > 0) { - if (parameterMapper.isConstant) { - value = parameterMapper.defaultValue; - } - else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); - if (!propertySearchResult.propertyFound) { - propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); - } - let useDefaultValue = false; - if (!propertySearchResult.propertyFound) { - useDefaultValue = - parameterMapper.required || - (parameterPath[0] === "options" && parameterPath.length === 2); - } - value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; - } - // Serialize just for validation purposes. - const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); - serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); - } - } - else { - if (parameterMapper.required) { - value = {}; - } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); - // Serialize just for validation purposes. 
- const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); - serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); - if (propertyValue !== undefined && propertyValue !== null) { - if (!value) { - value = {}; - } - value[propertyName] = propertyValue; - } - } - } - return value; -} -function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; - for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; - // Make sure to check inherited properties too, so don't use hasOwnProperty(). - if (parent !== undefined && parent !== null && parameterPathPart in parent) { - parent = parent[parameterPathPart]; - } - else { - break; - } - } - if (i === parameterPath.length) { - result.propertyValue = parent; - result.propertyFound = true; - } - return result; -} -/** - * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). - * @param _response - Wrapper object for http response. - * @param responseSpec - Mappers for how to parse the response properties. - * @returns - A normalized response object. 
- */ -export function flattenResponse(_response, responseSpec) { - const parsedHeaders = _response.parsedHeaders; - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const addOperationResponse = (obj) => { - return Object.defineProperty(obj, "_response", { - value: _response, - }); - }; - if (bodyMapper) { - const typeName = bodyMapper.type.name; - if (typeName === "Stream") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); - } - const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); - if (typeName === "Sequence" || isPageableResponse) { - const arrayResponse = [...(_response.parsedBody || [])]; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { - arrayResponse[key] = _response.parsedBody[key]; - } - } - if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { - arrayResponse[key] = parsedHeaders[key]; - } - } - addOperationResponse(arrayResponse); - return arrayResponse; - } - if (typeName === "Composite" || typeName === "Dictionary") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); - } - } - if (bodyMapper || - _response.request.method === "HEAD" || - utils.isPrimitiveType(_response.parsedBody)) { - // primitive body types and HEAD booleans - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); - } - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); -} -function getCredentialScopes(options, baseUri) { - if (options === null || options === void 0 ? 
void 0 : options.credentialScopes) { - return options.credentialScopes; - } - if (baseUri) { - return `${baseUri}/.default`; - } - return undefined; -} -//# sourceMappingURL=serviceClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/url.js b/node_modules/@azure/core-http/dist-esm/src/url.js deleted file mode 100644 index 35503a507..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/url.js +++ /dev/null @@ -1,597 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { replaceAll } from "./util/utils"; -/** - * A class that handles the query portion of a URLBuilder. - */ -export class URLQuery { - constructor() { - this._rawQuery = {}; - } - /** - * Get whether or not there any query parameters in this URLQuery. - */ - any() { - return Object.keys(this._rawQuery).length > 0; - } - /** - * Get the keys of the query string. - */ - keys() { - return Object.keys(this._rawQuery); - } - /** - * Set a query parameter with the provided name and value. If the parameterValue is undefined or - * empty, then this will attempt to remove an existing query parameter with the provided - * parameterName. - */ - set(parameterName, parameterValue) { - const caseParameterValue = parameterValue; - if (parameterName) { - if (caseParameterValue !== undefined && caseParameterValue !== null) { - const newValue = Array.isArray(caseParameterValue) - ? caseParameterValue - : caseParameterValue.toString(); - this._rawQuery[parameterName] = newValue; - } - else { - delete this._rawQuery[parameterName]; - } - } - } - /** - * Get the value of the query parameter with the provided name. If no parameter exists with the - * provided parameter name, then undefined will be returned. - */ - get(parameterName) { - return parameterName ? this._rawQuery[parameterName] : undefined; - } - /** - * Get the string representation of this query. The return value will not start with a "?". 
- */ - toString() { - let result = ""; - for (const parameterName in this._rawQuery) { - if (result) { - result += "&"; - } - const parameterValue = this._rawQuery[parameterName]; - if (Array.isArray(parameterValue)) { - const parameterStrings = []; - for (const parameterValueElement of parameterValue) { - parameterStrings.push(`${parameterName}=${parameterValueElement}`); - } - result += parameterStrings.join("&"); - } - else { - result += `${parameterName}=${parameterValue}`; - } - } - return result; - } - /** - * Parse a URLQuery from the provided text. - */ - static parse(text) { - const result = new URLQuery(); - if (text) { - if (text.startsWith("?")) { - text = text.substring(1); - } - let currentState = "ParameterName"; - let parameterName = ""; - let parameterValue = ""; - for (let i = 0; i < text.length; ++i) { - const currentCharacter = text[i]; - switch (currentState) { - case "ParameterName": - switch (currentCharacter) { - case "=": - currentState = "ParameterValue"; - break; - case "&": - parameterName = ""; - parameterValue = ""; - break; - default: - parameterName += currentCharacter; - break; - } - break; - case "ParameterValue": - switch (currentCharacter) { - case "&": - result.set(parameterName, parameterValue); - parameterName = ""; - parameterValue = ""; - currentState = "ParameterName"; - break; - default: - parameterValue += currentCharacter; - break; - } - break; - default: - throw new Error("Unrecognized URLQuery parse state: " + currentState); - } - } - if (currentState === "ParameterValue") { - result.set(parameterName, parameterValue); - } - } - return result; - } -} -/** - * A class that handles creating, modifying, and parsing URLs. - */ -export class URLBuilder { - /** - * Set the scheme/protocol for this URL. If the provided scheme contains other parts of a URL - * (such as a host, port, path, or query), those parts will be added to this URL as well. 
- */ - setScheme(scheme) { - if (!scheme) { - this._scheme = undefined; - } - else { - this.set(scheme, "SCHEME"); - } - } - /** - * Get the scheme that has been set in this URL. - */ - getScheme() { - return this._scheme; - } - /** - * Set the host for this URL. If the provided host contains other parts of a URL (such as a - * port, path, or query), those parts will be added to this URL as well. - */ - setHost(host) { - if (!host) { - this._host = undefined; - } - else { - this.set(host, "SCHEME_OR_HOST"); - } - } - /** - * Get the host that has been set in this URL. - */ - getHost() { - return this._host; - } - /** - * Set the port for this URL. If the provided port contains other parts of a URL (such as a - * path or query), those parts will be added to this URL as well. - */ - setPort(port) { - if (port === undefined || port === null || port === "") { - this._port = undefined; - } - else { - this.set(port.toString(), "PORT"); - } - } - /** - * Get the port that has been set in this URL. - */ - getPort() { - return this._port; - } - /** - * Set the path for this URL. If the provided path contains a query, then it will be added to - * this URL as well. - */ - setPath(path) { - if (!path) { - this._path = undefined; - } - else { - const schemeIndex = path.indexOf("://"); - if (schemeIndex !== -1) { - const schemeStart = path.lastIndexOf("/", schemeIndex); - // Make sure to only grab the URL part of the path before setting the state back to SCHEME - // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" - this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); - } - else { - this.set(path, "PATH"); - } - } - } - /** - * Append the provided path to this URL's existing path. If the provided path contains a query, - * then it will be added to this URL as well. 
- */ - appendPath(path) { - if (path) { - let currentPath = this.getPath(); - if (currentPath) { - if (!currentPath.endsWith("/")) { - currentPath += "/"; - } - if (path.startsWith("/")) { - path = path.substring(1); - } - path = currentPath + path; - } - this.set(path, "PATH"); - } - } - /** - * Get the path that has been set in this URL. - */ - getPath() { - return this._path; - } - /** - * Set the query in this URL. - */ - setQuery(query) { - if (!query) { - this._query = undefined; - } - else { - this._query = URLQuery.parse(query); - } - } - /** - * Set a query parameter with the provided name and value in this URL's query. If the provided - * query parameter value is undefined or empty, then the query parameter will be removed if it - * existed. - */ - setQueryParameter(queryParameterName, queryParameterValue) { - if (queryParameterName) { - if (!this._query) { - this._query = new URLQuery(); - } - this._query.set(queryParameterName, queryParameterValue); - } - } - /** - * Get the value of the query parameter with the provided query parameter name. If no query - * parameter exists with the provided name, then undefined will be returned. - */ - getQueryParameterValue(queryParameterName) { - return this._query ? this._query.get(queryParameterName) : undefined; - } - /** - * Get the query in this URL. - */ - getQuery() { - return this._query ? this._query.toString() : undefined; - } - /** - * Set the parts of this URL by parsing the provided text using the provided startState. 
- */ - set(text, startState) { - const tokenizer = new URLTokenizer(text, startState); - while (tokenizer.next()) { - const token = tokenizer.current(); - let tokenPath; - if (token) { - switch (token.type) { - case "SCHEME": - this._scheme = token.text || undefined; - break; - case "HOST": - this._host = token.text || undefined; - break; - case "PORT": - this._port = token.text || undefined; - break; - case "PATH": - tokenPath = token.text || undefined; - if (!this._path || this._path === "/" || tokenPath !== "/") { - this._path = tokenPath; - } - break; - case "QUERY": - this._query = URLQuery.parse(token.text); - break; - default: - throw new Error(`Unrecognized URLTokenType: ${token.type}`); - } - } - } - } - /** - * Serializes the URL as a string. - * @returns the URL as a string. - */ - toString() { - let result = ""; - if (this._scheme) { - result += `${this._scheme}://`; - } - if (this._host) { - result += this._host; - } - if (this._port) { - result += `:${this._port}`; - } - if (this._path) { - if (!this._path.startsWith("/")) { - result += "/"; - } - result += this._path; - } - if (this._query && this._query.any()) { - result += `?${this._query.toString()}`; - } - return result; - } - /** - * If the provided searchValue is found in this URLBuilder, then replace it with the provided - * replaceValue. - */ - replaceAll(searchValue, replaceValue) { - if (searchValue) { - this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); - this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); - this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); - this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); - this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); - } - } - /** - * Parses a given string URL into a new {@link URLBuilder}. 
- */ - static parse(text) { - const result = new URLBuilder(); - result.set(text, "SCHEME_OR_HOST"); - return result; - } -} -export class URLToken { - constructor(text, type) { - this.text = text; - this.type = type; - } - static scheme(text) { - return new URLToken(text, "SCHEME"); - } - static host(text) { - return new URLToken(text, "HOST"); - } - static port(text) { - return new URLToken(text, "PORT"); - } - static path(text) { - return new URLToken(text, "PATH"); - } - static query(text) { - return new URLToken(text, "QUERY"); - } -} -/** - * Get whether or not the provided character (single character string) is an alphanumeric (letter or - * digit) character. - */ -export function isAlphaNumericCharacter(character) { - const characterCode = character.charCodeAt(0); - return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || - (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || - (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); -} -/** - * A class that tokenizes URL strings. - */ -export class URLTokenizer { - constructor(_text, state) { - this._text = _text; - this._textLength = _text ? _text.length : 0; - this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; - this._currentIndex = 0; - } - /** - * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer - * hasn't started or has finished tokenizing. - */ - current() { - return this._currentToken; - } - /** - * Advance to the next URLToken and return whether or not a URLToken was found. 
- */ - next() { - if (!hasCurrentCharacter(this)) { - this._currentToken = undefined; - } - else { - switch (this._currentState) { - case "SCHEME": - nextScheme(this); - break; - case "SCHEME_OR_HOST": - nextSchemeOrHost(this); - break; - case "HOST": - nextHost(this); - break; - case "PORT": - nextPort(this); - break; - case "PATH": - nextPath(this); - break; - case "QUERY": - nextQuery(this); - break; - default: - throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); - } - } - return !!this._currentToken; - } -} -/** - * Read the remaining characters from this Tokenizer's character stream. - */ -function readRemaining(tokenizer) { - let result = ""; - if (tokenizer._currentIndex < tokenizer._textLength) { - result = tokenizer._text.substring(tokenizer._currentIndex); - tokenizer._currentIndex = tokenizer._textLength; - } - return result; -} -/** - * Whether or not this URLTokenizer has a current character. - */ -function hasCurrentCharacter(tokenizer) { - return tokenizer._currentIndex < tokenizer._textLength; -} -/** - * Get the character in the text string at the current index. - */ -function getCurrentCharacter(tokenizer) { - return tokenizer._text[tokenizer._currentIndex]; -} -/** - * Advance to the character in text that is "step" characters ahead. If no step value is provided, - * then step will default to 1. - */ -function nextCharacter(tokenizer, step) { - if (hasCurrentCharacter(tokenizer)) { - if (!step) { - step = 1; - } - tokenizer._currentIndex += step; - } -} -/** - * Starting with the current character, peek "charactersToPeek" number of characters ahead in this - * Tokenizer's stream of characters. 
- */ -function peekCharacters(tokenizer, charactersToPeek) { - let endIndex = tokenizer._currentIndex + charactersToPeek; - if (tokenizer._textLength < endIndex) { - endIndex = tokenizer._textLength; - } - return tokenizer._text.substring(tokenizer._currentIndex, endIndex); -} -/** - * Read characters from this Tokenizer until the end of the stream or until the provided condition - * is false when provided the current character. - */ -function readWhile(tokenizer, condition) { - let result = ""; - while (hasCurrentCharacter(tokenizer)) { - const currentCharacter = getCurrentCharacter(tokenizer); - if (!condition(currentCharacter)) { - break; - } - else { - result += currentCharacter; - nextCharacter(tokenizer); - } - } - return result; -} -/** - * Read characters from this Tokenizer until a non-alphanumeric character or the end of the - * character stream is reached. - */ -function readWhileLetterOrDigit(tokenizer) { - return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); -} -/** - * Read characters from this Tokenizer until one of the provided terminating characters is read or - * the end of the character stream is reached. 
- */ -function readUntilCharacter(tokenizer, ...terminatingCharacters) { - return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); -} -function nextScheme(tokenizer) { - const scheme = readWhileLetterOrDigit(tokenizer); - tokenizer._currentToken = URLToken.scheme(scheme); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "HOST"; - } -} -function nextSchemeOrHost(tokenizer) { - const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - if (peekCharacters(tokenizer, 3) === "://") { - tokenizer._currentToken = URLToken.scheme(schemeOrHost); - tokenizer._currentState = "HOST"; - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "PORT"; - } - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } - } -} -function nextHost(tokenizer) { - if (peekCharacters(tokenizer, 3) === "://") { - nextCharacter(tokenizer, 3); - } - const host = readUntilCharacter(tokenizer, ":", "/", "?"); - tokenizer._currentToken = URLToken.host(host); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - tokenizer._currentState = "PORT"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPort(tokenizer) { - if (getCurrentCharacter(tokenizer) === ":") { - nextCharacter(tokenizer); - } - const port = readUntilCharacter(tokenizer, "/", "?"); - tokenizer._currentToken = URLToken.port(port); - if 
(!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPath(tokenizer) { - const path = readUntilCharacter(tokenizer, "?"); - tokenizer._currentToken = URLToken.path(path); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextQuery(tokenizer) { - if (getCurrentCharacter(tokenizer) === "?") { - nextCharacter(tokenizer); - } - const query = readRemaining(tokenizer); - tokenizer._currentToken = URLToken.query(query); - tokenizer._currentState = "DONE"; -} -//# sourceMappingURL=url.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js deleted file mode 100644 index 1505a28ca..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/base64.browser.js +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Encodes a string in base64 format. - * @param value - The string to encode - */ -export function encodeString(value) { - return btoa(value); -} -/** - * Encodes a byte array in base64 format. - * @param value - The Uint8Aray to encode - */ -export function encodeByteArray(value) { - let str = ""; - for (let i = 0; i < value.length; i++) { - str += String.fromCharCode(value[i]); - } - return btoa(str); -} -/** - * Decodes a base64 string into a byte array. 
- * @param value - The base64 string to decode - */ -export function decodeString(value) { - const byteString = atob(value); - const arr = new Uint8Array(byteString.length); - for (let i = 0; i < byteString.length; i++) { - arr[i] = byteString.charCodeAt(i); - } - return arr; -} -//# sourceMappingURL=base64.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/base64.js b/node_modules/@azure/core-http/dist-esm/src/util/base64.js deleted file mode 100644 index f5fff422f..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/base64.js +++ /dev/null @@ -1,27 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Encodes a string in base64 format. - * @param value - The string to encode - */ -export function encodeString(value) { - return Buffer.from(value).toString("base64"); -} -/** - * Encodes a byte array in base64 format. - * @param value - The Uint8Aray to encode - */ -export function encodeByteArray(value) { - // Buffer.from accepts | -- the TypeScript definition is off here - // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); - return bufferValue.toString("base64"); -} -/** - * Decodes a base64 string into a byte array. - * @param value - The base64 string to decode - */ -export function decodeString(value) { - return Buffer.from(value, "base64"); -} -//# sourceMappingURL=base64.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/constants.js b/node_modules/@azure/core-http/dist-esm/src/util/constants.js deleted file mode 100644 index 2d3d47bb1..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/constants.js +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/** - * A set of constants used internally when processing requests. - */ -export const Constants = { - /** - * The core-http version - */ - coreHttpVersion: "3.0.4", - /** - * Specifies HTTP. - */ - HTTP: "http:", - /** - * Specifies HTTPS. - */ - HTTPS: "https:", - /** - * Specifies HTTP Proxy. - */ - HTTP_PROXY: "HTTP_PROXY", - /** - * Specifies HTTPS Proxy. - */ - HTTPS_PROXY: "HTTPS_PROXY", - /** - * Specifies NO Proxy. - */ - NO_PROXY: "NO_PROXY", - /** - * Specifies ALL Proxy. - */ - ALL_PROXY: "ALL_PROXY", - HttpConstants: { - /** - * Http Verbs - */ - HttpVerbs: { - PUT: "PUT", - GET: "GET", - DELETE: "DELETE", - POST: "POST", - MERGE: "MERGE", - HEAD: "HEAD", - PATCH: "PATCH", - }, - StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503, - }, - }, - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - AUTHORIZATION_SCHEME: "Bearer", - /** - * The Retry-After response-header field can be used with a 503 (Service - * Unavailable) or 349 (Too Many Requests) responses to indicate how long - * the service is expected to be unavailable to the requesting client. - */ - RETRY_AFTER: "Retry-After", - /** - * The UserAgent header. - */ - USER_AGENT: "User-Agent", - }, -}; -//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js deleted file mode 100644 index e88884502..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/exponentialBackoffStrategy.js +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-export const DEFAULT_CLIENT_RETRY_COUNT = 3; -// intervals are in ms -export const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; -export const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; -export const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; -export function isNumber(n) { - return typeof n === "number"; -} -/** - * @internal - * Determines if the operation should be retried. - * - * @param retryLimit - Specifies the max number of retries. - * @param predicate - Initial chekck on whether to retry based on given responses or errors - * @param retryData - The retry data. - * @returns True if the operation qualifies for a retry; false otherwise. - */ -export function shouldRetry(retryLimit, predicate, retryData, response, error) { - if (!predicate(response, error)) { - return false; - } - return retryData.retryCount < retryLimit; -} -/** - * @internal - * Updates the retry data for the next attempt. - * - * @param retryOptions - specifies retry interval, and its lower bound and upper bound. - * @param retryData - The retry data. - * @param err - The operation"s error, if any. 
- */ -export function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { - if (err) { - if (retryData.error) { - err.innerError = retryData.error; - } - retryData.error = err; - } - // Adjust retry count - retryData.retryCount++; - // Adjust retry interval - let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; - const boundedRandDelta = retryOptions.retryInterval * 0.8 + - Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); - incrementDelta *= boundedRandDelta; - retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); - return retryData; -} -//# sourceMappingURL=exponentialBackoffStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js deleted file mode 100644 index 65d8b5e2b..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/inspect.browser.js +++ /dev/null @@ -1,4 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -export const custom = {}; -//# sourceMappingURL=inspect.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js b/node_modules/@azure/core-http/dist-esm/src/util/inspect.js deleted file mode 100644 index 53fba7551..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/inspect.js +++ /dev/null @@ -1,5 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import { inspect } from "util"; -export const custom = inspect.custom; -//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js b/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js deleted file mode 100644 index 7683347f6..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/sanitizer.js +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { URLBuilder, URLQuery } from "../url"; -import { isObject } from "./utils"; -const RedactedString = "REDACTED"; -const defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate", -]; -const defaultAllowedQueryParameters = ["api-version"]; -export class Sanitizer { - constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - allowedHeaderNames = Array.isArray(allowedHeaderNames) - ? defaultAllowedHeaderNames.concat(allowedHeaderNames) - : defaultAllowedHeaderNames; - allowedQueryParameters = Array.isArray(allowedQueryParameters) - ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters) - : defaultAllowedQueryParameters; - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); - } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { - // Ensure Errors include their interesting non-enumerable members - if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); - } - if (key === "_headersMap") { - return this.sanitizeHeaders(value); - } - else if (key === "url") { - return this.sanitizeUrl(value); - } - else if (key === "query") { - return this.sanitizeQuery(value); - } - else if (key === "body") { - // Don't log the request body - return undefined; - } - else if (key === "response") { - // Don't log response again - return undefined; - } - else if (key === "operationSpec") { - // When using sendOperationRequest, the request carries a massive - // field with the autorest spec. No need to log it. 
- return undefined; - } - else if (Array.isArray(value) || isObject(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); - } - return value; - }, 2); - } - sanitizeHeaders(value) { - return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); - } - sanitizeQuery(value) { - return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); - } - sanitizeObject(value, allowedKeys, accessor) { - if (typeof value !== "object" || value === null) { - return value; - } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (allowedKeys.has(k.toLowerCase())) { - sanitized[k] = accessor(value, k); - } - else { - sanitized[k] = RedactedString; - } - } - return sanitized; - } - sanitizeUrl(value) { - if (typeof value !== "string" || value === null) { - return value; - } - const urlBuilder = URLBuilder.parse(value); - const queryString = urlBuilder.getQuery(); - if (!queryString) { - return value; - } - const query = URLQuery.parse(queryString); - for (const k of query.keys()) { - if (!this.allowedQueryParameters.has(k.toLowerCase())) { - query.set(k, RedactedString); - } - } - urlBuilder.setQuery(query.toString()); - return urlBuilder.toString(); - } -} -//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js b/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js deleted file mode 100644 index cc3bb6162..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/serializer.common.js +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Default key used to access the XML attributes. - */ -export const XML_ATTRKEY = "$"; -/** - * Default key used to access the XML value content. 
- */ -export const XML_CHARKEY = "_"; -//# sourceMappingURL=serializer.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js b/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js deleted file mode 100644 index b9e5a3844..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/throttlingRetryStrategy.js +++ /dev/null @@ -1,7 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Maximum number of retries for the throttling retry policy - */ -export const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; -//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/utils.js b/node_modules/@azure/core-http/dist-esm/src/util/utils.js deleted file mode 100644 index cebdbb118..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/utils.js +++ /dev/null @@ -1,206 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { Constants } from "./constants"; -import { XML_ATTRKEY } from "./serializer.common"; -import { v4 as uuidv4 } from "uuid"; -const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; -/** - * Checks if a parsed URL is HTTPS - * - * @param urlToCheck - The url to check - * @returns True if the URL is HTTPS; false otherwise. - */ -export function urlIsHTTPS(urlToCheck) { - return urlToCheck.protocol.toLowerCase() === Constants.HTTPS; -} -/** - * Encodes an URI. - * - * @param uri - The URI to be encoded. - * @returns The encoded URI. - */ -export function encodeUri(uri) { - return encodeURIComponent(uri) - .replace(/!/g, "%21") - .replace(/"/g, "%27") - .replace(/\(/g, "%28") - .replace(/\)/g, "%29") - .replace(/\*/g, "%2A"); -} -/** - * Returns a stripped version of the Http Response which only contains body, - * headers and the status. 
- * - * @param response - The Http Response - * @returns The stripped version of Http Response. - */ -export function stripResponse(response) { - const strippedResponse = {}; - strippedResponse.body = response.bodyAsText; - strippedResponse.headers = response.headers; - strippedResponse.status = response.status; - return strippedResponse; -} -/** - * Returns a stripped version of the Http Request that does not contain the - * Authorization header. - * - * @param request - The Http Request object - * @returns The stripped version of Http Request. - */ -export function stripRequest(request) { - const strippedRequest = request.clone(); - if (strippedRequest.headers) { - strippedRequest.headers.remove("authorization"); - } - return strippedRequest; -} -/** - * Validates the given uuid as a string - * - * @param uuid - The uuid as a string that needs to be validated - * @returns True if the uuid is valid; false otherwise. - */ -export function isValidUuid(uuid) { - return validUuidRegex.test(uuid); -} -/** - * Generated UUID - * - * @returns RFC4122 v4 UUID. - */ -export function generateUuid() { - return uuidv4(); -} -/** - * Executes an array of promises sequentially. Inspiration of this method is here: - * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! - * - * @param promiseFactories - An array of promise factories(A function that return a promise) - * @param kickstart - Input to the first promise that is used to kickstart the promise chain. - * If not provided then the promise chain starts with undefined. - * @returns A chain of resolved or rejected promises - */ -export function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { - result = result.then(promiseFactory); - }); - return result; -} -/** - * Converts a Promise to a callback. 
- * @param promise - The Promise to be converted to a callback - * @returns A function that takes the callback `(cb: Function) => void` - * @deprecated generated code should instead depend on responseToBody - */ -// eslint-disable-next-line @typescript-eslint/ban-types -export function promiseToCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { - promise - .then((data) => { - // eslint-disable-next-line promise/no-callback-in-promise - return cb(undefined, data); - }) - .catch((err) => { - // eslint-disable-next-line promise/no-callback-in-promise - cb(err); - }); - }; -} -/** - * Converts a Promise to a service callback. - * @param promise - The Promise of HttpOperationResponse to be converted to a service callback - * @returns A function that takes the service callback (cb: ServiceCallback): void - */ -export function promiseToServiceCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - return (cb) => { - promise - .then((data) => { - return process.nextTick(cb, undefined, data.parsedBody, data.request, data); - }) - .catch((err) => { - process.nextTick(cb, err); - }); - }; -} -export function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { - if (!Array.isArray(obj)) { - obj = [obj]; - } - if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; - } - const result = { [elementName]: obj }; - result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; - return result; -} -/** - * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor - * @param targetCtor - The target object on which the properties need to be applied. - * @param sourceCtors - An array of source objects from which the properties need to be taken. 
- */ -export function applyMixins(targetCtorParam, sourceCtors) { - const castTargetCtorParam = targetCtorParam; - sourceCtors.forEach((sourceCtor) => { - Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { - castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; - }); - }); -} -const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; -/** - * Indicates whether the given string is in ISO 8601 format. - * @param value - The value to be validated for ISO 8601 duration format. - * @returns `true` if valid, `false` otherwise. - */ -export function isDuration(value) { - return validateISODuration.test(value); -} -/** - * Replace all of the instances of searchValue in value with the provided replaceValue. - * @param value - The value to search and replace in. - * @param searchValue - The value to search for in the value argument. - * @param replaceValue - The value to replace searchValue with in the value argument. - * @returns The value where each instance of searchValue was replaced with replacedValue. - */ -export function replaceAll(value, searchValue, replaceValue) { - return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); -} -/** - * Determines whether the given entity is a basic/primitive type - * (string, number, boolean, null, undefined). - * @param value - Any entity - * @returns true is it is primitive type, false otherwise. 
- */ -export function isPrimitiveType(value) { - return (typeof value !== "object" && typeof value !== "function") || value === null; -} -export function getEnvironmentValue(name) { - if (process.env[name]) { - return process.env[name]; - } - else if (process.env[name.toLowerCase()]) { - return process.env[name.toLowerCase()]; - } - return undefined; -} -/** - * @internal - * @returns true when input is an object type that is not null, Array, RegExp, or Date. - */ -export function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); -} -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js deleted file mode 100644 index e76da182c..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/xml.browser.js +++ /dev/null @@ -1,208 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; -if (!self.document || !self.DOMParser || !self.Node || !self.XMLSerializer) { - throw new Error(`This library depends on the following DOM objects: ["document", "DOMParser", "Node", "XMLSerializer"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. 
`); -} -let cachedDoc; -function getDoc() { - if (!cachedDoc) { - cachedDoc = document.implementation.createDocument(null, null, null); - } - return cachedDoc; -} -let cachedParser; -function getParser() { - if (!cachedParser) { - cachedParser = new DOMParser(); - } - return cachedParser; -} -let cachedSerializer; -function getSerializer() { - if (!cachedSerializer) { - cachedSerializer = new XMLSerializer(); - } - return cachedSerializer; -} -// Policy to make our code Trusted Types compliant. -// https://github.com/w3c/webappsec-trusted-types -// We are calling DOMParser.parseFromString() to parse XML payload from Azure services. -// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing -// according to the spec. There are no HTML/XSS security concerns on the usage of -// parseFromString() here. -let ttPolicy; -if (typeof self.trustedTypes !== "undefined") { - ttPolicy = self.trustedTypes.createPolicy("@azure/core-http#xml.browser", { - createHTML: (s) => s, - }); -} -export function parseXML(str, opts = {}) { - var _a, _b, _c, _d; - try { - const updatedOptions = { - rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - const dom = getParser().parseFromString(((_d = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _d !== void 0 ? _d : str), "application/xml"); - throwIfError(dom); - let obj; - if (updatedOptions.includeRoot) { - obj = domToObject(dom, updatedOptions); - } - else { - obj = domToObject(dom.childNodes[0], updatedOptions); - } - return Promise.resolve(obj); - } - catch (err) { - return Promise.reject(err); - } -} -let errorNS; -function getErrorNamespace() { - var _a, _b; - if (errorNS === undefined) { - try { - const invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? 
void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? _a : "INVALID"); - errorNS = - (_b = getParser().parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] - .namespaceURI) !== null && _b !== void 0 ? _b : ""; - } - catch (ignored) { - // Most browsers will return a document containing , but IE will throw. - errorNS = ""; - } - } - return errorNS; -} -function throwIfError(dom) { - const parserErrors = dom.getElementsByTagName("parsererror"); - if (parserErrors.length > 0 && getErrorNamespace()) { - for (let i = 0; i < parserErrors.length; i++) { - if (parserErrors[i].namespaceURI === errorNS) { - throw new Error(parserErrors[i].innerHTML); - } - } - } -} -function isElement(node) { - return !!node.attributes; -} -/** - * Get the Element-typed version of the provided Node if the provided node is an element with - * attributes. If it isn't, then undefined is returned. - */ -function asElementWithAttributes(node) { - return isElement(node) && node.hasAttributes() ? 
node : undefined; -} -function domToObject(node, options) { - let result = {}; - const childNodeCount = node.childNodes.length; - const firstChildNode = node.childNodes[0]; - const onlyChildTextValue = (firstChildNode && - childNodeCount === 1 && - firstChildNode.nodeType === Node.TEXT_NODE && - firstChildNode.nodeValue) || - undefined; - const elementWithAttributes = asElementWithAttributes(node); - if (elementWithAttributes) { - result[XML_ATTRKEY] = {}; - for (let i = 0; i < elementWithAttributes.attributes.length; i++) { - const attr = elementWithAttributes.attributes[i]; - result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue; - } - if (onlyChildTextValue) { - result[options.xmlCharKey] = onlyChildTextValue; - } - } - else if (childNodeCount === 0) { - result = ""; - } - else if (onlyChildTextValue) { - result = onlyChildTextValue; - } - if (!onlyChildTextValue) { - for (let i = 0; i < childNodeCount; i++) { - const child = node.childNodes[i]; - // Ignore leading/trailing whitespace nodes - if (child.nodeType !== Node.TEXT_NODE) { - const childObject = domToObject(child, options); - if (!result[child.nodeName]) { - result[child.nodeName] = childObject; - } - else if (Array.isArray(result[child.nodeName])) { - result[child.nodeName].push(childObject); - } - else { - result[child.nodeName] = [result[child.nodeName], childObject]; - } - } - } - } - return result; -} -export function stringifyXML(content, opts = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "root", - includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, - }; - const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0]; - return ('' + - getSerializer().serializeToString(dom)); -} -function buildAttributes(attrs) { - const result = []; - for (const key of Object.keys(attrs)) { - const attr = getDoc().createAttribute(key); - attr.value = attrs[key].toString(); - result.push(attr); - } - return result; -} -function buildNode(obj, elementName, options) { - if (obj === undefined || - obj === null || - typeof obj === "string" || - typeof obj === "number" || - typeof obj === "boolean") { - const elem = getDoc().createElement(elementName); - elem.textContent = obj === undefined || obj === null ? "" : obj.toString(); - return [elem]; - } - else if (Array.isArray(obj)) { - const result = []; - for (const arrayElem of obj) { - for (const child of buildNode(arrayElem, elementName, options)) { - result.push(child); - } - } - return result; - } - else if (typeof obj === "object") { - const elem = getDoc().createElement(elementName); - for (const key of Object.keys(obj)) { - if (key === XML_ATTRKEY) { - for (const attr of buildAttributes(obj[key])) { - elem.attributes.setNamedItem(attr); - } - } - else if (key === options.xmlCharKey) { - elem.textContent = obj[key].toString(); - } - else { - for (const child of buildNode(obj[key], key, options)) { - elem.appendChild(child); - } - } - } - return [elem]; - } - else { - throw new Error(`Illegal value passed to buildObject: ${obj}`); - } -} -//# sourceMappingURL=xml.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/util/xml.js b/node_modules/@azure/core-http/dist-esm/src/util/xml.js deleted file mode 100644 index 230f0c227..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/util/xml.js +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import * as xml2js from "xml2js"; -import { XML_ATTRKEY, XML_CHARKEY } from "./serializer.common"; -// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed -// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 -// By creating a new copy of the settings each time we instantiate the parser, -// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. -const xml2jsDefaultOptionsV2 = { - explicitCharkey: false, - trim: false, - normalize: false, - normalizeTags: false, - attrkey: XML_ATTRKEY, - explicitArray: true, - ignoreAttrs: false, - mergeAttrs: false, - explicitRoot: true, - validator: undefined, - xmlns: false, - explicitChildren: false, - preserveChildrenOrder: false, - childkey: "$$", - charsAsChildren: false, - includeWhiteChars: false, - async: false, - strict: true, - attrNameProcessors: undefined, - attrValueProcessors: undefined, - tagNameProcessors: undefined, - valueProcessors: undefined, - rootName: "root", - xmldec: { - version: "1.0", - encoding: "UTF-8", - standalone: true, - }, - doctype: undefined, - renderOpts: { - pretty: true, - indent: " ", - newline: "\n", - }, - headless: false, - chunkSize: 10000, - emptyTag: "", - cdata: false, -}; -// The xml2js settings for general XML parsing operations. -const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsParserSettings.explicitArray = false; -// The xml2js settings for general XML building operations. 
-const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsBuilderSettings.explicitArray = false; -xml2jsBuilderSettings.renderOpts = { - pretty: false, -}; -/** - * Converts given JSON object to XML string - * @param obj - JSON object to be converted into XML string - * @param opts - Options that govern the parsing of given JSON object - */ -export function stringifyXML(obj, opts = {}) { - var _a; - xml2jsBuilderSettings.rootName = opts.rootName; - xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); - return builder.buildObject(obj); -} -/** - * Converts given XML string into JSON - * @param str - String containing the XML content to be parsed into JSON - * @param opts - Options that govern the parsing of given xml string - */ -export function parseXML(str, opts = {}) { - var _a; - xml2jsParserSettings.explicitRoot = !!opts.includeRoot; - xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); - return new Promise((resolve, reject) => { - if (!str) { - reject(new Error("Document is empty")); - } - else { - xmlParser.parseString(str, (err, res) => { - if (err) { - reject(err); - } - else { - resolve(res); - } - }); - } - }); -} -//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/webResource.js b/node_modules/@azure/core-http/dist-esm/src/webResource.js deleted file mode 100644 index 3fc3d27e5..000000000 --- a/node_modules/@azure/core-http/dist-esm/src/webResource.js +++ /dev/null @@ -1,264 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-import { HttpHeaders, isHttpHeadersLike } from "./httpHeaders"; -import { Serializer } from "./serializer"; -import { generateUuid } from "./util/utils"; -export function isWebResourceLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.url === "string" && - typeof castObject.method === "string" && - typeof castObject.headers === "object" && - isHttpHeadersLike(castObject.headers) && - typeof castObject.validateRequestProperties === "function" && - typeof castObject.prepare === "function" && - typeof castObject.clone === "function") { - return true; - } - } - return false; -} -/** - * Creates a new WebResource object. - * - * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary - * properties to initiate a request. - */ -export class WebResource { - constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { - this.streamResponseBody = streamResponseBody; - this.streamResponseStatusCodes = streamResponseStatusCodes; - this.url = url || ""; - this.method = method || "GET"; - this.headers = isHttpHeadersLike(headers) ? headers : new HttpHeaders(headers); - this.body = body; - this.query = query; - this.formData = undefined; - this.withCredentials = withCredentials || false; - this.abortSignal = abortSignal; - this.timeout = timeout || 0; - this.onUploadProgress = onUploadProgress; - this.onDownloadProgress = onDownloadProgress; - this.proxySettings = proxySettings; - this.keepAlive = keepAlive; - this.decompressResponse = decompressResponse; - this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); - } - /** - * Validates that the required properties such as method, url, headers["Content-Type"], - * headers["accept-language"] are defined. 
It will throw an error if one of the above - * mentioned properties are not defined. - */ - validateRequestProperties() { - if (!this.method) { - throw new Error("WebResource.method is required."); - } - if (!this.url) { - throw new Error("WebResource.url is required."); - } - } - /** - * Prepares the request. - * @param options - Options to provide for preparing the request. - * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. - */ - prepare(options) { - if (!options) { - throw new Error("options object is required"); - } - if (options.method === undefined || - options.method === null || - typeof options.method.valueOf() !== "string") { - throw new Error("options.method must be a string."); - } - if (options.url && options.pathTemplate) { - throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); - } - if ((options.pathTemplate === undefined || - options.pathTemplate === null || - typeof options.pathTemplate.valueOf() !== "string") && - (options.url === undefined || - options.url === null || - typeof options.url.valueOf() !== "string")) { - throw new Error("Please provide exactly one of options.pathTemplate or options.url."); - } - // set the url if it is provided. - if (options.url) { - if (typeof options.url !== "string") { - throw new Error('options.url must be of type "string".'); - } - this.url = options.url; - } - // set the method - if (options.method) { - const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; - if (validMethods.indexOf(options.method.toUpperCase()) === -1) { - throw new Error('The provided method "' + - options.method + - '" is invalid. 
Supported HTTP methods are: ' + - JSON.stringify(validMethods)); - } - } - this.method = options.method.toUpperCase(); - // construct the url if path template is provided - if (options.pathTemplate) { - const { pathTemplate, pathParameters } = options; - if (typeof pathTemplate !== "string") { - throw new Error('options.pathTemplate must be of type "string".'); - } - if (!options.baseUrl) { - options.baseUrl = "https://management.azure.com"; - } - const baseUrl = options.baseUrl; - let url = baseUrl + - (baseUrl.endsWith("/") ? "" : "/") + - (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); - const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); - if (segments && segments.length) { - if (!pathParameters) { - throw new Error(`pathTemplate: ${pathTemplate} has been provided. Hence, options.pathParameters must also be provided.`); - } - segments.forEach(function (item) { - const pathParamName = item.slice(1, -1); - const pathParam = pathParameters[pathParamName]; - if (pathParam === null || - pathParam === undefined || - !(typeof pathParam === "string" || typeof pathParam === "object")) { - const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); - throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + - ` however, it is not present in parameters: ${stringifiedPathParameters}.` + - `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + - `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); - } - if (typeof pathParam.valueOf() === "string") { - url = url.replace(item, encodeURIComponent(pathParam)); - } - if (typeof pathParam.valueOf() === "object") { - if (!pathParam.value) { - throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (pathParam.skipUrlEncoding) { - url = 
url.replace(item, pathParam.value); - } - else { - url = url.replace(item, encodeURIComponent(pathParam.value)); - } - } - }); - } - this.url = url; - } - // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. - if (options.queryParameters) { - const queryParameters = options.queryParameters; - if (typeof queryParameters !== "object") { - throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + - `of "query-parameter-name" as the key and the "query-parameter-value" as the value. ` + - `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); - } - // append question mark if it is not present in the url - if (this.url && this.url.indexOf("?") === -1) { - this.url += "?"; - } - // construct queryString - const queryParams = []; - // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). 
- this.query = {}; - for (const queryParamName in queryParameters) { - const queryParam = queryParameters[queryParamName]; - if (queryParam) { - if (typeof queryParam === "string") { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); - this.query[queryParamName] = encodeURIComponent(queryParam); - } - else if (typeof queryParam === "object") { - if (!queryParam.value) { - throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (queryParam.skipUrlEncoding) { - queryParams.push(queryParamName + "=" + queryParam.value); - this.query[queryParamName] = queryParam.value; - } - else { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); - this.query[queryParamName] = encodeURIComponent(queryParam.value); - } - } - } - } // end-of-for - // append the queryString - this.url += queryParams.join("&"); - } - // add headers to the request if they are provided - if (options.headers) { - const headers = options.headers; - for (const headerName of Object.keys(options.headers)) { - this.headers.set(headerName, headers[headerName]); - } - } - // ensure accept-language is set correctly - if (!this.headers.get("accept-language")) { - this.headers.set("accept-language", "en-US"); - } - // ensure the request-id is set correctly - if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { - this.headers.set("x-ms-client-request-id", this.requestId); - } - // default - if (!this.headers.get("Content-Type")) { - this.headers.set("Content-Type", "application/json; charset=utf-8"); - } - // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly - this.body = options.body; - if (options.body !== undefined && options.body !== null) { - // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. 
- if (options.bodyIsStream) { - if (!this.headers.get("Transfer-Encoding")) { - this.headers.set("Transfer-Encoding", "chunked"); - } - if (this.headers.get("Content-Type") !== "application/octet-stream") { - this.headers.set("Content-Type", "application/octet-stream"); - } - } - else { - if (options.serializationMapper) { - this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); - } - if (!options.disableJsonStringifyOnBody) { - this.body = JSON.stringify(options.body); - } - } - } - if (options.spanOptions) { - this.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - this.tracingContext = options.tracingContext; - } - this.abortSignal = options.abortSignal; - this.onDownloadProgress = options.onDownloadProgress; - this.onUploadProgress = options.onUploadProgress; - return this; - } - /** - * Clone this WebResource HTTP request object. - * @returns The clone of this WebResource HTTP request object. - */ - clone() { - const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); - if (this.formData) { - result.formData = this.formData; - } - if (this.operationSpec) { - result.operationSpec = this.operationSpec; - } - if (this.shouldDeserialize) { - result.shouldDeserialize = this.shouldDeserialize; - } - if (this.operationResponseGetter) { - result.operationResponseGetter = this.operationResponseGetter; - } - return result; - } -} -//# sourceMappingURL=webResource.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js b/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js deleted file mode 100644 index 2e3b259f3..000000000 --- 
a/node_modules/@azure/core-http/dist-esm/src/xhrHttpClient.js +++ /dev/null @@ -1,176 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { HttpHeaders } from "./httpHeaders"; -import { AbortError } from "@azure/abort-controller"; -import { RestError } from "./restError"; -/** - * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. - */ -export class XhrHttpClient { - sendRequest(request) { - var _a; - const xhr = new XMLHttpRequest(); - if (request.proxySettings) { - throw new Error("HTTP proxy is not supported in browser environment"); - } - const abortSignal = request.abortSignal; - if (abortSignal) { - if (abortSignal.aborted) { - return Promise.reject(new AbortError("The operation was aborted.")); - } - const listener = () => { - xhr.abort(); - }; - abortSignal.addEventListener("abort", listener); - xhr.addEventListener("readystatechange", () => { - if (xhr.readyState === XMLHttpRequest.DONE) { - abortSignal.removeEventListener("abort", listener); - } - }); - } - addProgressListener(xhr.upload, request.onUploadProgress); - addProgressListener(xhr, request.onDownloadProgress); - if (request.formData) { - const formData = request.formData; - const requestForm = new FormData(); - const appendFormValue = (key, value) => { - if (value && - Object.prototype.hasOwnProperty.call(value, "value") && - Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } - } - request.body = requestForm; - request.formData = undefined; - const contentType = request.headers.get("Content-Type"); - if (contentType && 
contentType.indexOf("multipart/form-data") !== -1) { - // browser will automatically apply a suitable content-type header - request.headers.remove("Content-Type"); - } - } - xhr.open(request.method, request.url); - xhr.timeout = request.timeout; - xhr.withCredentials = request.withCredentials; - for (const header of request.headers.headersArray()) { - xhr.setRequestHeader(header.name, header.value); - } - xhr.responseType = - ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) || request.streamResponseBody ? "blob" : "text"; - // tslint:disable-next-line:no-null-keyword - xhr.send(request.body === undefined ? null : request.body); - if (xhr.responseType === "blob") { - return new Promise((resolve, reject) => { - handleBlobResponse(xhr, request, resolve, reject); - rejectOnTerminalEvent(request, xhr, reject); - }); - } - else { - return new Promise(function (resolve, reject) { - xhr.addEventListener("load", () => resolve({ - request, - status: xhr.status, - headers: parseHeaders(xhr), - bodyAsText: xhr.responseText, - })); - rejectOnTerminalEvent(request, xhr, reject); - }); - } - } -} -function handleBlobResponse(xhr, request, res, rej) { - xhr.addEventListener("readystatechange", () => { - var _a; - // Resolve as soon as headers are loaded - if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { - if (request.streamResponseBody || ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(xhr.status))) { - const blobBody = new Promise((resolve, reject) => { - xhr.addEventListener("load", () => { - resolve(xhr.response); - }); - rejectOnTerminalEvent(request, xhr, reject); - }); - res({ - request, - status: xhr.status, - headers: parseHeaders(xhr), - blobBody, - }); - } - else { - xhr.addEventListener("load", () => { - // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" - // but the status code is not one of the stream response status codes, - // so treat it as text and convert from Blob to text - if (xhr.response) { - // Blob.text() is not supported in IE so using FileReader instead - const reader = new FileReader(); - reader.onload = function (e) { - var _a; - const text = (_a = e.target) === null || _a === void 0 ? void 0 : _a.result; - res({ - request, - status: xhr.status, - headers: parseHeaders(xhr), - bodyAsText: text, - }); - }; - reader.onerror = function (_e) { - rej(reader.error); - }; - reader.readAsText(xhr.response, "UTF-8"); - } - else { - res({ - request, - status: xhr.status, - headers: parseHeaders(xhr), - }); - } - }); - } - } - }); -} -function addProgressListener(xhr, listener) { - if (listener) { - xhr.addEventListener("progress", (rawEvent) => listener({ - loadedBytes: rawEvent.loaded, - })); - } -} -// exported locally for testing -export function parseHeaders(xhr) { - const responseHeaders = new HttpHeaders(); - const headerLines = xhr - .getAllResponseHeaders() - .trim() - .split(/[\r\n]+/); - for (const line of headerLines) { - const index = line.indexOf(":"); - const headerName = line.slice(0, index); - const headerValue = line.slice(index + 2); - responseHeaders.set(headerName, headerValue); - } - return responseHeaders; -} -function rejectOnTerminalEvent(request, xhr, reject) { - xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, RestError.REQUEST_SEND_ERROR, undefined, request))); - const abortError = new 
AbortError("The operation was aborted."); - xhr.addEventListener("abort", () => reject(abortError)); - xhr.addEventListener("timeout", () => reject(abortError)); -} -//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/dist/index.js b/node_modules/@azure/core-http/dist/index.js deleted file mode 100644 index 602ee120e..000000000 --- a/node_modules/@azure/core-http/dist/index.js +++ /dev/null @@ -1,5465 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -var uuid = require('uuid'); -var util = require('util'); -var tslib = require('tslib'); -var xml2js = require('xml2js'); -var coreUtil = require('@azure/core-util'); -var logger$1 = require('@azure/logger'); -var coreAuth = require('@azure/core-auth'); -var os = require('os'); -var http = require('http'); -var https = require('https'); -var abortController = require('@azure/abort-controller'); -var tunnel = require('tunnel'); -var stream = require('stream'); -var FormData = require('form-data'); -var node_fetch = require('node-fetch'); -var coreTracing = require('@azure/core-tracing'); - -function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } - -function _interopNamespace(e) { - if (e && e.__esModule) return e; - var n = Object.create(null); - if (e) { - Object.keys(e).forEach(function (k) { - if (k !== 'default') { - var d = Object.getOwnPropertyDescriptor(e, k); - Object.defineProperty(n, k, d.get ? 
d : { - enumerable: true, - get: function () { return e[k]; } - }); - } - }); - } - n["default"] = e; - return Object.freeze(n); -} - -var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); -var os__namespace = /*#__PURE__*/_interopNamespace(os); -var http__namespace = /*#__PURE__*/_interopNamespace(http); -var https__namespace = /*#__PURE__*/_interopNamespace(https); -var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); -var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); -var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A collection of HttpHeaders that can be sent with a HTTP request. - */ -function getHeaderKey(headerName) { - return headerName.toLowerCase(); -} -function isHttpHeadersLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.rawHeaders === "function" && - typeof castObject.clone === "function" && - typeof castObject.get === "function" && - typeof castObject.set === "function" && - typeof castObject.contains === "function" && - typeof castObject.remove === "function" && - typeof castObject.headersArray === "function" && - typeof castObject.headerValues === "function" && - typeof castObject.headerNames === "function" && - typeof castObject.toJson === "function") { - return true; - } - } - return false; -} -/** - * A collection of HTTP header key/value pairs. - */ -class HttpHeaders { - constructor(rawHeaders) { - this._headersMap = {}; - if (rawHeaders) { - for (const headerName in rawHeaders) { - this.set(headerName, rawHeaders[headerName]); - } - } - } - /** - * Set a header in this collection with the provided name and value. The name is - * case-insensitive. - * @param headerName - The name of the header to set. This value is case-insensitive. - * @param headerValue - The value of the header to set. 
- */ - set(headerName, headerValue) { - this._headersMap[getHeaderKey(headerName)] = { - name: headerName, - value: headerValue.toString().trim(), - }; - } - /** - * Get the header value for the provided header name, or undefined if no header exists in this - * collection with the provided name. - * @param headerName - The name of the header. - */ - get(headerName) { - const header = this._headersMap[getHeaderKey(headerName)]; - return !header ? undefined : header.value; - } - /** - * Get whether or not this header collection contains a header entry for the provided header name. - */ - contains(headerName) { - return !!this._headersMap[getHeaderKey(headerName)]; - } - /** - * Remove the header with the provided headerName. Return whether or not the header existed and - * was removed. - * @param headerName - The name of the header to remove. - */ - remove(headerName) { - const result = this.contains(headerName); - delete this._headersMap[getHeaderKey(headerName)]; - return result; - } - /** - * Get the headers that are contained this collection as an object. - */ - rawHeaders() { - return this.toJson({ preserveCase: true }); - } - /** - * Get the headers that are contained in this collection as an array. - */ - headersArray() { - const headers = []; - for (const headerKey in this._headersMap) { - headers.push(this._headersMap[headerKey]); - } - return headers; - } - /** - * Get the header names that are contained in this collection. - */ - headerNames() { - const headerNames = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerNames.push(headers[i].name); - } - return headerNames; - } - /** - * Get the header values that are contained in this collection. 
- */ - headerValues() { - const headerValues = []; - const headers = this.headersArray(); - for (let i = 0; i < headers.length; ++i) { - headerValues.push(headers[i].value); - } - return headerValues; - } - /** - * Get the JSON object representation of this HTTP header collection. - */ - toJson(options = {}) { - const result = {}; - if (options.preserveCase) { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name] = header.value; - } - } - else { - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[getHeaderKey(header.name)] = header.value; - } - } - return result; - } - /** - * Get the string representation of this HTTP header collection. - */ - toString() { - return JSON.stringify(this.toJson({ preserveCase: true })); - } - /** - * Create a deep clone/copy of this HttpHeaders collection. - */ - clone() { - const resultPreservingCasing = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - resultPreservingCasing[header.name] = header.value; - } - return new HttpHeaders(resultPreservingCasing); - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Encodes a string in base64 format. - * @param value - The string to encode - */ -function encodeString(value) { - return Buffer.from(value).toString("base64"); -} -/** - * Encodes a byte array in base64 format. - * @param value - The Uint8Aray to encode - */ -function encodeByteArray(value) { - // Buffer.from accepts | -- the TypeScript definition is off here - // https://nodejs.org/api/buffer.html#buffer_class_method_buffer_from_arraybuffer_byteoffset_length - const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); - return bufferValue.toString("base64"); -} -/** - * Decodes a base64 string into a byte array. 
- * @param value - The base64 string to decode - */ -function decodeString(value) { - return Buffer.from(value, "base64"); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * A set of constants used internally when processing requests. - */ -const Constants = { - /** - * The core-http version - */ - coreHttpVersion: "3.0.4", - /** - * Specifies HTTP. - */ - HTTP: "http:", - /** - * Specifies HTTPS. - */ - HTTPS: "https:", - /** - * Specifies HTTP Proxy. - */ - HTTP_PROXY: "HTTP_PROXY", - /** - * Specifies HTTPS Proxy. - */ - HTTPS_PROXY: "HTTPS_PROXY", - /** - * Specifies NO Proxy. - */ - NO_PROXY: "NO_PROXY", - /** - * Specifies ALL Proxy. - */ - ALL_PROXY: "ALL_PROXY", - HttpConstants: { - /** - * Http Verbs - */ - HttpVerbs: { - PUT: "PUT", - GET: "GET", - DELETE: "DELETE", - POST: "POST", - MERGE: "MERGE", - HEAD: "HEAD", - PATCH: "PATCH", - }, - StatusCodes: { - TooManyRequests: 429, - ServiceUnavailable: 503, - }, - }, - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - AUTHORIZATION_SCHEME: "Bearer", - /** - * The Retry-After response-header field can be used with a 503 (Service - * Unavailable) or 349 (Too Many Requests) responses to indicate how long - * the service is expected to be unavailable to the requesting client. - */ - RETRY_AFTER: "Retry-After", - /** - * The UserAgent header. - */ - USER_AGENT: "User-Agent", - }, -}; - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Default key used to access the XML attributes. - */ -const XML_ATTRKEY = "$"; -/** - * Default key used to access the XML value content. - */ -const XML_CHARKEY = "_"; - -// Copyright (c) Microsoft Corporation. -const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; -/** - * Encodes an URI. - * - * @param uri - The URI to be encoded. 
- * @returns The encoded URI. - */ -function encodeUri(uri) { - return encodeURIComponent(uri) - .replace(/!/g, "%21") - .replace(/"/g, "%27") - .replace(/\(/g, "%28") - .replace(/\)/g, "%29") - .replace(/\*/g, "%2A"); -} -/** - * Returns a stripped version of the Http Response which only contains body, - * headers and the status. - * - * @param response - The Http Response - * @returns The stripped version of Http Response. - */ -function stripResponse(response) { - const strippedResponse = {}; - strippedResponse.body = response.bodyAsText; - strippedResponse.headers = response.headers; - strippedResponse.status = response.status; - return strippedResponse; -} -/** - * Returns a stripped version of the Http Request that does not contain the - * Authorization header. - * - * @param request - The Http Request object - * @returns The stripped version of Http Request. - */ -function stripRequest(request) { - const strippedRequest = request.clone(); - if (strippedRequest.headers) { - strippedRequest.headers.remove("authorization"); - } - return strippedRequest; -} -/** - * Validates the given uuid as a string - * - * @param uuid - The uuid as a string that needs to be validated - * @returns True if the uuid is valid; false otherwise. - */ -function isValidUuid(uuid) { - return validUuidRegex.test(uuid); -} -/** - * Generated UUID - * - * @returns RFC4122 v4 UUID. - */ -function generateUuid() { - return uuid.v4(); -} -/** - * Executes an array of promises sequentially. Inspiration of this method is here: - * https://pouchdb.com/2015/05/18/we-have-a-problem-with-promises.html. An awesome blog on promises! - * - * @param promiseFactories - An array of promise factories(A function that return a promise) - * @param kickstart - Input to the first promise that is used to kickstart the promise chain. - * If not provided then the promise chain starts with undefined. 
- * @returns A chain of resolved or rejected promises - */ -function executePromisesSequentially(promiseFactories, kickstart) { - let result = Promise.resolve(kickstart); - promiseFactories.forEach((promiseFactory) => { - result = result.then(promiseFactory); - }); - return result; -} -/** - * Converts a Promise to a callback. - * @param promise - The Promise to be converted to a callback - * @returns A function that takes the callback `(cb: Function) => void` - * @deprecated generated code should instead depend on responseToBody - */ -// eslint-disable-next-line @typescript-eslint/ban-types -function promiseToCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - // eslint-disable-next-line @typescript-eslint/ban-types - return (cb) => { - promise - .then((data) => { - // eslint-disable-next-line promise/no-callback-in-promise - return cb(undefined, data); - }) - .catch((err) => { - // eslint-disable-next-line promise/no-callback-in-promise - cb(err); - }); - }; -} -/** - * Converts a Promise to a service callback. 
- * @param promise - The Promise of HttpOperationResponse to be converted to a service callback - * @returns A function that takes the service callback (cb: ServiceCallback): void - */ -function promiseToServiceCallback(promise) { - if (typeof promise.then !== "function") { - throw new Error("The provided input is not a Promise."); - } - return (cb) => { - promise - .then((data) => { - return process.nextTick(cb, undefined, data.parsedBody, data.request, data); - }) - .catch((err) => { - process.nextTick(cb, err); - }); - }; -} -function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { - if (!Array.isArray(obj)) { - obj = [obj]; - } - if (!xmlNamespaceKey || !xmlNamespace) { - return { [elementName]: obj }; - } - const result = { [elementName]: obj }; - result[XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; - return result; -} -/** - * Applies the properties on the prototype of sourceCtors to the prototype of targetCtor - * @param targetCtor - The target object on which the properties need to be applied. - * @param sourceCtors - An array of source objects from which the properties need to be taken. - */ -function applyMixins(targetCtorParam, sourceCtors) { - const castTargetCtorParam = targetCtorParam; - sourceCtors.forEach((sourceCtor) => { - Object.getOwnPropertyNames(sourceCtor.prototype).forEach((name) => { - castTargetCtorParam.prototype[name] = sourceCtor.prototype[name]; - }); - }); -} -const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; -/** - * Indicates whether the given string is in ISO 8601 format. - * @param value - The value to be validated for ISO 8601 duration format. - * @returns `true` if valid, `false` otherwise. 
- */ -function isDuration(value) { - return validateISODuration.test(value); -} -/** - * Replace all of the instances of searchValue in value with the provided replaceValue. - * @param value - The value to search and replace in. - * @param searchValue - The value to search for in the value argument. - * @param replaceValue - The value to replace searchValue with in the value argument. - * @returns The value where each instance of searchValue was replaced with replacedValue. - */ -function replaceAll(value, searchValue, replaceValue) { - return !value || !searchValue ? value : value.split(searchValue).join(replaceValue || ""); -} -/** - * Determines whether the given entity is a basic/primitive type - * (string, number, boolean, null, undefined). - * @param value - Any entity - * @returns true is it is primitive type, false otherwise. - */ -function isPrimitiveType(value) { - return (typeof value !== "object" && typeof value !== "function") || value === null; -} -function getEnvironmentValue(name) { - if (process.env[name]) { - return process.env[name]; - } - else if (process.env[name.toLowerCase()]) { - return process.env[name.toLowerCase()]; - } - return undefined; -} -/** - * @internal - * @returns true when input is an object type that is not null, Array, RegExp, or Date. - */ -function isObject(input) { - return (typeof input === "object" && - input !== null && - !Array.isArray(input) && - !(input instanceof RegExp) && - !(input instanceof Date)); -} - -// Copyright (c) Microsoft Corporation. -// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. -/** - * Used to map raw response objects to final shapes. - * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. - * Also allows pulling values from headers, as well as inserting default values and constants. 
- */ -class Serializer { - constructor( - /** - * The provided model mapper. - */ - modelMappers = {}, - /** - * Whether the contents are XML or not. - */ - isXML) { - this.modelMappers = modelMappers; - this.isXML = isXML; - } - /** - * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. - * @param mapper - The definition of data models. - * @param value - The value. - * @param objectName - Name of the object. Used in the error messages. - * @deprecated Removing the constraints validation on client side. - */ - validateConstraints(mapper, value, objectName) { - const failValidation = (constraintName, constraintValue) => { - throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); - }; - if (mapper.constraints && value != undefined) { - const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; - if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { - failValidation("ExclusiveMaximum", ExclusiveMaximum); - } - if (ExclusiveMinimum != undefined && valueAsNumber <= ExclusiveMinimum) { - failValidation("ExclusiveMinimum", ExclusiveMinimum); - } - if (InclusiveMaximum != undefined && valueAsNumber > InclusiveMaximum) { - failValidation("InclusiveMaximum", InclusiveMaximum); - } - if (InclusiveMinimum != undefined && valueAsNumber < InclusiveMinimum) { - failValidation("InclusiveMinimum", InclusiveMinimum); - } - const valueAsArray = value; - if (MaxItems != undefined && valueAsArray.length > MaxItems) { - failValidation("MaxItems", MaxItems); - } - if (MaxLength != undefined && valueAsArray.length > MaxLength) { - failValidation("MaxLength", MaxLength); - } - if (MinItems != undefined && valueAsArray.length < MinItems) { - failValidation("MinItems", MinItems); - } - if 
(MinLength != undefined && valueAsArray.length < MinLength) { - failValidation("MinLength", MinLength); - } - if (MultipleOf != undefined && valueAsNumber % MultipleOf !== 0) { - failValidation("MultipleOf", MultipleOf); - } - if (Pattern) { - const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; - if (typeof value !== "string" || value.match(pattern) === null) { - failValidation("Pattern", Pattern); - } - } - if (UniqueItems && - valueAsArray.some((item, i, ar) => ar.indexOf(item) !== i)) { - failValidation("UniqueItems", UniqueItems); - } - } - } - /** - * Serialize the given object based on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param object - A valid Javascript object to be serialized. - * @param objectName - Name of the serialized object. - * @param options - additional options to deserialization. - * @returns A valid serialized Javascript object. - */ - serialize(mapper, object, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - let payload = {}; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Sequence$/i) !== null) { - payload = []; - } - if (mapper.isConstant) { - object = mapper.defaultValue; - } - // This table of allowed values should help explain - // the mapper.required and mapper.nullable properties. - // X means "neither undefined or null are allowed". 
- // || required - // || true | false - // nullable || ========================== - // true || null | undefined/null - // false || X | undefined - // undefined || X | undefined/null - const { required, nullable } = mapper; - if (required && nullable && object === undefined) { - throw new Error(`${objectName} cannot be undefined.`); - } - if (required && !nullable && object == undefined) { - throw new Error(`${objectName} cannot be null or undefined.`); - } - if (!required && nullable === false && object === null) { - throw new Error(`${objectName} cannot be null.`); - } - if (object == undefined) { - payload = object; - } - else { - if (mapperType.match(/^any$/i) !== null) { - payload = object; - } - else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { - payload = serializeBasicTypes(mapperType, objectName, object); - } - else if (mapperType.match(/^Enum$/i) !== null) { - const enumMapper = mapper; - payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); - } - else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { - payload = serializeDateTypes(mapperType, object, objectName); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = serializeByteArrayType(objectName, object); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = serializeBase64UrlType(objectName, object); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - else if (mapperType.match(/^Composite$/i) !== null) { - payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); - } - } - return payload; - } - /** - * Deserialize the given object based 
on its metadata defined in the mapper. - * - * @param mapper - The mapper which defines the metadata of the serializable object. - * @param responseBody - A valid Javascript entity to be deserialized. - * @param objectName - Name of the deserialized object. - * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object. - */ - deserialize(mapper, responseBody, objectName, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - if (responseBody == undefined) { - if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { - // Edge case for empty XML non-wrapped lists. xml2js can't distinguish - // between the list being empty versus being missing, - // so let's do the more user-friendly thing and return an empty list. - responseBody = []; - } - // specifically check for undefined as default value can be a falsey value `0, "", false, null` - if (mapper.defaultValue !== undefined) { - responseBody = mapper.defaultValue; - } - return responseBody; - } - let payload; - const mapperType = mapper.type.name; - if (!objectName) { - objectName = mapper.serializedName; - } - if (mapperType.match(/^Composite$/i) !== null) { - payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); - } - else { - if (this.isXML) { - const xmlCharKey = updatedOptions.xmlCharKey; - const castResponseBody = responseBody; - /** - * If the mapper specifies this as a non-composite type value but the responseBody contains - * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, - * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. 
- */ - if (castResponseBody[XML_ATTRKEY] != undefined && - castResponseBody[xmlCharKey] != undefined) { - responseBody = castResponseBody[xmlCharKey]; - } - } - if (mapperType.match(/^Number$/i) !== null) { - payload = parseFloat(responseBody); - if (isNaN(payload)) { - payload = responseBody; - } - } - else if (mapperType.match(/^Boolean$/i) !== null) { - if (responseBody === "true") { - payload = true; - } - else if (responseBody === "false") { - payload = false; - } - else { - payload = responseBody; - } - } - else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { - payload = responseBody; - } - else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { - payload = new Date(responseBody); - } - else if (mapperType.match(/^UnixTime$/i) !== null) { - payload = unixTimeToDate(responseBody); - } - else if (mapperType.match(/^ByteArray$/i) !== null) { - payload = decodeString(responseBody); - } - else if (mapperType.match(/^Base64Url$/i) !== null) { - payload = base64UrlToByteArray(responseBody); - } - else if (mapperType.match(/^Sequence$/i) !== null) { - payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); - } - else if (mapperType.match(/^Dictionary$/i) !== null) { - payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); - } - } - if (mapper.isConstant) { - payload = mapper.defaultValue; - } - return payload; - } -} -function trimEnd(str, ch) { - let len = str.length; - while (len - 1 >= 0 && str[len - 1] === ch) { - --len; - } - return str.substr(0, len); -} -function bufferToBase64Url(buffer) { - if (!buffer) { - return undefined; - } - if (!(buffer instanceof Uint8Array)) { - throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); - } - // Uint8Array to Base64. - const str = encodeByteArray(buffer); - // Base64 to Base64Url. 
- return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); -} -function base64UrlToByteArray(str) { - if (!str) { - return undefined; - } - if (str && typeof str.valueOf() !== "string") { - throw new Error("Please provide an input of type string for converting to Uint8Array"); - } - // Base64Url to Base64. - str = str.replace(/-/g, "+").replace(/_/g, "/"); - // Base64 to Uint8Array. - return decodeString(str); -} -function splitSerializeName(prop) { - const classes = []; - let partialclass = ""; - if (prop) { - const subwords = prop.split("."); - for (const item of subwords) { - if (item.charAt(item.length - 1) === "\\") { - partialclass += item.substr(0, item.length - 1) + "."; - } - else { - partialclass += item; - classes.push(partialclass); - partialclass = ""; - } - } - } - return classes; -} -function dateToUnixTime(d) { - if (!d) { - return undefined; - } - if (typeof d.valueOf() === "string") { - d = new Date(d); - } - return Math.floor(d.getTime() / 1000); -} -function unixTimeToDate(n) { - if (!n) { - return undefined; - } - return new Date(n * 1000); -} -function serializeBasicTypes(typeName, objectName, value) { - if (value !== null && value !== undefined) { - if (typeName.match(/^Number$/i) !== null) { - if (typeof value !== "number") { - throw new Error(`${objectName} with value ${value} must be of type number.`); - } - } - else if (typeName.match(/^String$/i) !== null) { - if (typeof value.valueOf() !== "string") { - throw new Error(`${objectName} with value "${value}" must be of type string.`); - } - } - else if (typeName.match(/^Uuid$/i) !== null) { - if (!(typeof value.valueOf() === "string" && isValidUuid(value))) { - throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); - } - } - else if (typeName.match(/^Boolean$/i) !== null) { - if (typeof value !== "boolean") { - throw new Error(`${objectName} with value ${value} must be of type boolean.`); - } - } - else if (typeName.match(/^Stream$/i) !== 
null) { - const objectType = typeof value; - if (objectType !== "string" && - objectType !== "function" && - !(value instanceof ArrayBuffer) && - !ArrayBuffer.isView(value) && - !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob)) { - throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, or a function returning NodeJS.ReadableStream.`); - } - } - } - return value; -} -function serializeEnumType(objectName, allowedValues, value) { - if (!allowedValues) { - throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); - } - const isPresent = allowedValues.some((item) => { - if (typeof item.valueOf() === "string") { - return item.toLowerCase() === value.toLowerCase(); - } - return item === value; - }); - if (!isPresent) { - throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); - } - return value; -} -function serializeByteArrayType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - returnValue = encodeByteArray(value); - } - return returnValue; -} -function serializeBase64UrlType(objectName, value) { - let returnValue = ""; - if (value != undefined) { - if (!(value instanceof Uint8Array)) { - throw new Error(`${objectName} must be of type Uint8Array.`); - } - returnValue = bufferToBase64Url(value) || ""; - } - return returnValue; -} -function serializeDateTypes(typeName, value, objectName) { - if (value != undefined) { - if (typeName.match(/^Date$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = - value instanceof Date - ? 
value.toISOString().substring(0, 10) - : new Date(value).toISOString().substring(0, 10); - } - else if (typeName.match(/^DateTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); - } - value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); - } - else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); - } - value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); - } - else if (typeName.match(/^UnixTime$/i) !== null) { - if (!(value instanceof Date || - (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { - throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + - `for it to be serialized in UnixTime/Epoch format.`); - } - value = dateToUnixTime(value); - } - else if (typeName.match(/^TimeSpan$/i) !== null) { - if (!isDuration(value)) { - throw new Error(`${objectName} must be a string in ISO 8601 format. 
Instead was "${value}".`); - } - } - } - return value; -} -function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { - if (!Array.isArray(object)) { - throw new Error(`${objectName} must be of type Array.`); - } - const elementType = mapper.type.element; - if (!elementType || typeof elementType !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); - } - const tempArray = []; - for (let i = 0; i < object.length; i++) { - const serializedValue = serializer.serialize(elementType, object[i], objectName, options); - if (isXml && elementType.xmlNamespace) { - const xmlnsKey = elementType.xmlNamespacePrefix - ? `xmlns:${elementType.xmlNamespacePrefix}` - : "xmlns"; - if (elementType.type.name === "Composite") { - tempArray[i] = Object.assign({}, serializedValue); - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } - else { - tempArray[i] = {}; - tempArray[i][options.xmlCharKey] = serializedValue; - tempArray[i][XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; - } - } - else { - tempArray[i] = serializedValue; - } - } - return tempArray; -} -function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { - if (typeof object !== "object") { - throw new Error(`${objectName} must be of type object.`); - } - const valueType = mapper.type.value; - if (!valueType || typeof valueType !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}.`); - } - const tempDictionary = {}; - for (const key of Object.keys(object)) { - const serializedValue = serializer.serialize(valueType, object[key], objectName, options); - // If the element needs an XML namespace we need to add it within the $ property - tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); - } - // Add the 
namespace to the root element if needed - if (isXml && mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; - const result = tempDictionary; - result[XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; - return result; - } - return tempDictionary; -} -/** - * Resolves the additionalProperties property from a referenced mapper. - * @param serializer - The serializer containing the entire set of mappers. - * @param mapper - The composite mapper to resolve. - * @param objectName - Name of the object being serialized. - */ -function resolveAdditionalProperties(serializer, mapper, objectName) { - const additionalProperties = mapper.type.additionalProperties; - if (!additionalProperties && mapper.type.className) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; - } - return additionalProperties; -} -/** - * Finds the mapper referenced by `className`. - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized - */ -function resolveReferencedMapper(serializer, mapper, objectName) { - const className = mapper.type.className; - if (!className) { - throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); - } - return serializer.modelMappers[className]; -} -/** - * Resolves a composite mapper's modelProperties. 
- * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - */ -function resolveModelProperties(serializer, mapper, objectName) { - let modelProps = mapper.type.modelProperties; - if (!modelProps) { - const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); - if (!modelMapper) { - throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); - } - modelProps = modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.modelProperties; - if (!modelProps) { - throw new Error(`modelProperties cannot be null or undefined in the ` + - `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); - } - } - return modelProps; -} -function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); - } - if (object != undefined) { - const payload = {}; - const modelProps = resolveModelProperties(serializer, mapper, objectName); - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - if (propertyMapper.readOnly) { - continue; - } - let propName; - let parentObject = payload; - if (serializer.isXML) { - if (propertyMapper.xmlIsWrapped) { - propName = propertyMapper.xmlName; - } - else { - propName = propertyMapper.xmlElementName || propertyMapper.xmlName; - } - } - else { - const paths = splitSerializeName(propertyMapper.serializedName); - propName = paths.pop(); - for (const pathName of paths) { - const childObject = parentObject[pathName]; - if (childObject == undefined && - (object[key] != undefined || propertyMapper.defaultValue !== undefined)) { - parentObject[pathName] = {}; - } - parentObject = parentObject[pathName]; - } - } - if (parentObject != undefined) { - if (isXml && 
mapper.xmlNamespace) { - const xmlnsKey = mapper.xmlNamespacePrefix - ? `xmlns:${mapper.xmlNamespacePrefix}` - : "xmlns"; - parentObject[XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); - } - const propertyObjectName = propertyMapper.serializedName !== "" - ? objectName + "." + propertyMapper.serializedName - : objectName; - let toSerialize = object[key]; - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator && - polymorphicDiscriminator.clientName === key && - toSerialize == undefined) { - toSerialize = mapper.serializedName; - } - const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); - if (serializedValue !== undefined && propName != undefined) { - const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); - if (isXml && propertyMapper.xmlIsAttribute) { - // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. - // This keeps things simple while preventing name collision - // with names in user documents. 
- parentObject[XML_ATTRKEY] = parentObject[XML_ATTRKEY] || {}; - parentObject[XML_ATTRKEY][propName] = serializedValue; - } - else if (isXml && propertyMapper.xmlIsWrapped) { - parentObject[propName] = { [propertyMapper.xmlElementName]: value }; - } - else { - parentObject[propName] = value; - } - } - } - } - const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); - if (additionalPropertiesMapper) { - const propNames = Object.keys(modelProps); - for (const clientPropName in object) { - const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); - if (isAdditionalProperty) { - payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); - } - } - } - return payload; - } - return object; -} -function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { - if (!isXml || !propertyMapper.xmlNamespace) { - return serializedValue; - } - const xmlnsKey = propertyMapper.xmlNamespacePrefix - ? `xmlns:${propertyMapper.xmlNamespacePrefix}` - : "xmlns"; - const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; - if (["Composite"].includes(propertyMapper.type.name)) { - if (serializedValue[XML_ATTRKEY]) { - return serializedValue; - } - else { - const result = Object.assign({}, serializedValue); - result[XML_ATTRKEY] = xmlNamespace; - return result; - } - } - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = xmlNamespace; - return result; -} -function isSpecialXmlProperty(propertyName, options) { - return [XML_ATTRKEY, options.xmlCharKey].includes(propertyName); -} -function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { - var _a, _b; - const xmlCharKey = (_a = options.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { - mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); - } - const modelProps = resolveModelProperties(serializer, mapper, objectName); - let instance = {}; - const handledPropertyNames = []; - for (const key of Object.keys(modelProps)) { - const propertyMapper = modelProps[key]; - const paths = splitSerializeName(modelProps[key].serializedName); - handledPropertyNames.push(paths[0]); - const { serializedName, xmlName, xmlElementName } = propertyMapper; - let propertyObjectName = objectName; - if (serializedName !== "" && serializedName !== undefined) { - propertyObjectName = objectName + "." + serializedName; - } - const headerCollectionPrefix = propertyMapper.headerCollectionPrefix; - if (headerCollectionPrefix) { - const dictionary = {}; - for (const headerKey of Object.keys(responseBody)) { - if (headerKey.startsWith(headerCollectionPrefix)) { - dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options); - } - handledPropertyNames.push(headerKey); - } - instance[key] = dictionary; - } - else if (serializer.isXML) { - if (propertyMapper.xmlIsAttribute && responseBody[XML_ATTRKEY]) { - instance[key] = serializer.deserialize(propertyMapper, responseBody[XML_ATTRKEY][xmlName], propertyObjectName, options); - } - else if (propertyMapper.xmlIsMsText) { - if (responseBody[xmlCharKey] !== undefined) { - instance[key] = responseBody[xmlCharKey]; - } - else if (typeof responseBody === "string") { - // The special case where xml parser parses "content" into JSON of - // `{ name: "content"}` instead of `{ name: { "_": "content" }}` - instance[key] = responseBody; - } - } - else { - const propertyName = xmlElementName || xmlName || serializedName; - if (propertyMapper.xmlIsWrapped) { - /* a list of wrapped by - For the xml example below - - ... - ... 
- - the responseBody has - { - Cors: { - CorsRule: [{...}, {...}] - } - } - xmlName is "Cors" and xmlElementName is"CorsRule". - */ - const wrapped = responseBody[xmlName]; - const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : []; - instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options); - handledPropertyNames.push(xmlName); - } - else { - const property = responseBody[propertyName]; - instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options); - handledPropertyNames.push(propertyName); - } - } - } - else { - // deserialize the property if it is present in the provided responseBody instance - let propertyInstance; - let res = responseBody; - // traversing the object step by step. - for (const item of paths) { - if (!res) - break; - res = res[item]; - } - propertyInstance = res; - const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator; - // checking that the model property name (key)(ex: "fishtype") and the - // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype") - // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type") - // is a better approach. The generator is not consistent with escaping '\.' in the - // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator - // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However, - // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and - // the transformation of model property name (ex: "fishtype") is done consistently. - // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator. 
- if (polymorphicDiscriminator && - key === polymorphicDiscriminator.clientName && - propertyInstance == undefined) { - propertyInstance = mapper.serializedName; - } - let serializedValue; - // paging - if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { - propertyInstance = responseBody[key]; - const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - // Copy over any properties that have already been added into the instance, where they do - // not exist on the newly de-serialized array - for (const [k, v] of Object.entries(instance)) { - if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { - arrayInstance[k] = v; - } - } - instance = arrayInstance; - } - else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { - serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); - instance[key] = serializedValue; - } - } - } - const additionalPropertiesMapper = mapper.type.additionalProperties; - if (additionalPropertiesMapper) { - const isAdditionalProperty = (responsePropName) => { - for (const clientPropName in modelProps) { - const paths = splitSerializeName(modelProps[clientPropName].serializedName); - if (paths[0] === responsePropName) { - return false; - } - } - return true; - }; - for (const responsePropName in responseBody) { - if (isAdditionalProperty(responsePropName)) { - instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); - } - } - } - else if (responseBody) { - for (const key of Object.keys(responseBody)) { - if (instance[key] === undefined && - !handledPropertyNames.includes(key) && - !isSpecialXmlProperty(key, options)) { - instance[key] = responseBody[key]; - } - } - } - return instance; -} -function deserializeDictionaryType(serializer, mapper, responseBody, objectName, 
options) { - const value = mapper.type.value; - if (!value || typeof value !== "object") { - throw new Error(`"value" metadata for a Dictionary must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - const tempDictionary = {}; - for (const key of Object.keys(responseBody)) { - tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); - } - return tempDictionary; - } - return responseBody; -} -function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { - const element = mapper.type.element; - if (!element || typeof element !== "object") { - throw new Error(`element" metadata for an Array must be defined in the ` + - `mapper and it must of type "object" in ${objectName}`); - } - if (responseBody) { - if (!Array.isArray(responseBody)) { - // xml2js will interpret a single element array as just the element, so force it to be an array - responseBody = [responseBody]; - } - const tempArray = []; - for (let i = 0; i < responseBody.length; i++) { - tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); - } - return tempArray; - } - return responseBody; -} -function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { - const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); - if (polymorphicDiscriminator) { - const discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; - if (discriminatorName != undefined) { - const discriminatorValue = object[discriminatorName]; - if (discriminatorValue != undefined) { - const typeName = mapper.type.uberParent || mapper.type.className; - const indexDiscriminator = discriminatorValue === typeName - ? discriminatorValue - : typeName + "." 
+ discriminatorValue; - const polymorphicMapper = serializer.modelMappers.discriminators[indexDiscriminator]; - if (polymorphicMapper) { - mapper = polymorphicMapper; - } - } - } - } - return mapper; -} -function getPolymorphicDiscriminatorRecursively(serializer, mapper) { - return (mapper.type.polymorphicDiscriminator || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || - getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); -} -function getPolymorphicDiscriminatorSafely(serializer, typeName) { - return (typeName && - serializer.modelMappers[typeName] && - serializer.modelMappers[typeName].type.polymorphicDiscriminator); -} -/** - * Utility function that serializes an object that might contain binary information into a plain object, array or a string. - */ -function serializeObject(toSerialize) { - const castToSerialize = toSerialize; - if (toSerialize == undefined) - return undefined; - if (toSerialize instanceof Uint8Array) { - toSerialize = encodeByteArray(toSerialize); - return toSerialize; - } - else if (toSerialize instanceof Date) { - return toSerialize.toISOString(); - } - else if (Array.isArray(toSerialize)) { - const array = []; - for (let i = 0; i < toSerialize.length; i++) { - array.push(serializeObject(toSerialize[i])); - } - return array; - } - else if (typeof toSerialize === "object") { - const dictionary = {}; - for (const property in toSerialize) { - dictionary[property] = serializeObject(castToSerialize[property]); - } - return dictionary; - } - return toSerialize; -} -/** - * Utility function to create a K:V from a list of strings - */ -function strEnum(o) { - const result = {}; - for (const key of o) { - result[key] = key; - } - return result; -} -/** - * String enum containing the string types of property mappers. 
- */ -// eslint-disable-next-line @typescript-eslint/no-redeclare -const MapperType = strEnum([ - "Base64Url", - "Boolean", - "ByteArray", - "Composite", - "Date", - "DateTime", - "DateTimeRfc1123", - "Dictionary", - "Enum", - "Number", - "Object", - "Sequence", - "String", - "Stream", - "TimeSpan", - "UnixTime", -]); - -// Copyright (c) Microsoft Corporation. -function isWebResourceLike(object) { - if (object && typeof object === "object") { - const castObject = object; - if (typeof castObject.url === "string" && - typeof castObject.method === "string" && - typeof castObject.headers === "object" && - isHttpHeadersLike(castObject.headers) && - typeof castObject.validateRequestProperties === "function" && - typeof castObject.prepare === "function" && - typeof castObject.clone === "function") { - return true; - } - } - return false; -} -/** - * Creates a new WebResource object. - * - * This class provides an abstraction over a REST call by being library / implementation agnostic and wrapping the necessary - * properties to initiate a request. - */ -class WebResource { - constructor(url, method, body, query, headers, streamResponseBody, withCredentials, abortSignal, timeout, onUploadProgress, onDownloadProgress, proxySettings, keepAlive, decompressResponse, streamResponseStatusCodes) { - this.streamResponseBody = streamResponseBody; - this.streamResponseStatusCodes = streamResponseStatusCodes; - this.url = url || ""; - this.method = method || "GET"; - this.headers = isHttpHeadersLike(headers) ? 
headers : new HttpHeaders(headers); - this.body = body; - this.query = query; - this.formData = undefined; - this.withCredentials = withCredentials || false; - this.abortSignal = abortSignal; - this.timeout = timeout || 0; - this.onUploadProgress = onUploadProgress; - this.onDownloadProgress = onDownloadProgress; - this.proxySettings = proxySettings; - this.keepAlive = keepAlive; - this.decompressResponse = decompressResponse; - this.requestId = this.headers.get("x-ms-client-request-id") || generateUuid(); - } - /** - * Validates that the required properties such as method, url, headers["Content-Type"], - * headers["accept-language"] are defined. It will throw an error if one of the above - * mentioned properties are not defined. - */ - validateRequestProperties() { - if (!this.method) { - throw new Error("WebResource.method is required."); - } - if (!this.url) { - throw new Error("WebResource.url is required."); - } - } - /** - * Prepares the request. - * @param options - Options to provide for preparing the request. - * @returns Returns the prepared WebResource (HTTP Request) object that needs to be given to the request pipeline. - */ - prepare(options) { - if (!options) { - throw new Error("options object is required"); - } - if (options.method === undefined || - options.method === null || - typeof options.method.valueOf() !== "string") { - throw new Error("options.method must be a string."); - } - if (options.url && options.pathTemplate) { - throw new Error("options.url and options.pathTemplate are mutually exclusive. Please provide exactly one of them."); - } - if ((options.pathTemplate === undefined || - options.pathTemplate === null || - typeof options.pathTemplate.valueOf() !== "string") && - (options.url === undefined || - options.url === null || - typeof options.url.valueOf() !== "string")) { - throw new Error("Please provide exactly one of options.pathTemplate or options.url."); - } - // set the url if it is provided. 
- if (options.url) { - if (typeof options.url !== "string") { - throw new Error('options.url must be of type "string".'); - } - this.url = options.url; - } - // set the method - if (options.method) { - const validMethods = ["GET", "PUT", "HEAD", "DELETE", "OPTIONS", "POST", "PATCH", "TRACE"]; - if (validMethods.indexOf(options.method.toUpperCase()) === -1) { - throw new Error('The provided method "' + - options.method + - '" is invalid. Supported HTTP methods are: ' + - JSON.stringify(validMethods)); - } - } - this.method = options.method.toUpperCase(); - // construct the url if path template is provided - if (options.pathTemplate) { - const { pathTemplate, pathParameters } = options; - if (typeof pathTemplate !== "string") { - throw new Error('options.pathTemplate must be of type "string".'); - } - if (!options.baseUrl) { - options.baseUrl = "https://management.azure.com"; - } - const baseUrl = options.baseUrl; - let url = baseUrl + - (baseUrl.endsWith("/") ? "" : "/") + - (pathTemplate.startsWith("/") ? pathTemplate.slice(1) : pathTemplate); - const segments = url.match(/({[\w-]*\s*[\w-]*})/gi); - if (segments && segments.length) { - if (!pathParameters) { - throw new Error(`pathTemplate: ${pathTemplate} has been provided. 
Hence, options.pathParameters must also be provided.`); - } - segments.forEach(function (item) { - const pathParamName = item.slice(1, -1); - const pathParam = pathParameters[pathParamName]; - if (pathParam === null || - pathParam === undefined || - !(typeof pathParam === "string" || typeof pathParam === "object")) { - const stringifiedPathParameters = JSON.stringify(pathParameters, undefined, 2); - throw new Error(`pathTemplate: ${pathTemplate} contains the path parameter ${pathParamName}` + - ` however, it is not present in parameters: ${stringifiedPathParameters}.` + - `The value of the path parameter can either be a "string" of the form { ${pathParamName}: "some sample value" } or ` + - `it can be an "object" of the form { "${pathParamName}": { value: "some sample value", skipUrlEncoding: true } }.`); - } - if (typeof pathParam.valueOf() === "string") { - url = url.replace(item, encodeURIComponent(pathParam)); - } - if (typeof pathParam.valueOf() === "object") { - if (!pathParam.value) { - throw new Error(`options.pathParameters[${pathParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (pathParam.skipUrlEncoding) { - url = url.replace(item, pathParam.value); - } - else { - url = url.replace(item, encodeURIComponent(pathParam.value)); - } - } - }); - } - this.url = url; - } - // append query parameters to the url if they are provided. They can be provided with pathTemplate or url option. - if (options.queryParameters) { - const queryParameters = options.queryParameters; - if (typeof queryParameters !== "object") { - throw new Error(`options.queryParameters must be of type object. It should be a JSON object ` + - `of "query-parameter-name" as the key and the "query-parameter-value" as the value. 
` + - `The "query-parameter-value" may be fo type "string" or an "object" of the form { value: "query-parameter-value", skipUrlEncoding: true }.`); - } - // append question mark if it is not present in the url - if (this.url && this.url.indexOf("?") === -1) { - this.url += "?"; - } - // construct queryString - const queryParams = []; - // We need to populate this.query as a dictionary if the request is being used for Sway's validateRequest(). - this.query = {}; - for (const queryParamName in queryParameters) { - const queryParam = queryParameters[queryParamName]; - if (queryParam) { - if (typeof queryParam === "string") { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam)); - this.query[queryParamName] = encodeURIComponent(queryParam); - } - else if (typeof queryParam === "object") { - if (!queryParam.value) { - throw new Error(`options.queryParameters[${queryParamName}] is of type "object" but it does not contain a "value" property.`); - } - if (queryParam.skipUrlEncoding) { - queryParams.push(queryParamName + "=" + queryParam.value); - this.query[queryParamName] = queryParam.value; - } - else { - queryParams.push(queryParamName + "=" + encodeURIComponent(queryParam.value)); - this.query[queryParamName] = encodeURIComponent(queryParam.value); - } - } - } - } // end-of-for - // append the queryString - this.url += queryParams.join("&"); - } - // add headers to the request if they are provided - if (options.headers) { - const headers = options.headers; - for (const headerName of Object.keys(options.headers)) { - this.headers.set(headerName, headers[headerName]); - } - } - // ensure accept-language is set correctly - if (!this.headers.get("accept-language")) { - this.headers.set("accept-language", "en-US"); - } - // ensure the request-id is set correctly - if (!this.headers.get("x-ms-client-request-id") && !options.disableClientRequestId) { - this.headers.set("x-ms-client-request-id", this.requestId); - } - // default - if 
(!this.headers.get("Content-Type")) { - this.headers.set("Content-Type", "application/json; charset=utf-8"); - } - // set the request body. request.js automatically sets the Content-Length request header, so we need not set it explicitly - this.body = options.body; - if (options.body !== undefined && options.body !== null) { - // body as a stream special case. set the body as-is and check for some special request headers specific to sending a stream. - if (options.bodyIsStream) { - if (!this.headers.get("Transfer-Encoding")) { - this.headers.set("Transfer-Encoding", "chunked"); - } - if (this.headers.get("Content-Type") !== "application/octet-stream") { - this.headers.set("Content-Type", "application/octet-stream"); - } - } - else { - if (options.serializationMapper) { - this.body = new Serializer(options.mappers).serialize(options.serializationMapper, options.body, "requestBody"); - } - if (!options.disableJsonStringifyOnBody) { - this.body = JSON.stringify(options.body); - } - } - } - if (options.spanOptions) { - this.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - this.tracingContext = options.tracingContext; - } - this.abortSignal = options.abortSignal; - this.onDownloadProgress = options.onDownloadProgress; - this.onUploadProgress = options.onUploadProgress; - return this; - } - /** - * Clone this WebResource HTTP request object. - * @returns The clone of this WebResource HTTP request object. 
- */ - clone() { - const result = new WebResource(this.url, this.method, this.body, this.query, this.headers && this.headers.clone(), this.streamResponseBody, this.withCredentials, this.abortSignal, this.timeout, this.onUploadProgress, this.onDownloadProgress, this.proxySettings, this.keepAlive, this.decompressResponse, this.streamResponseStatusCodes); - if (this.formData) { - result.formData = this.formData; - } - if (this.operationSpec) { - result.operationSpec = this.operationSpec; - } - if (this.shouldDeserialize) { - result.shouldDeserialize = this.shouldDeserialize; - } - if (this.operationResponseGetter) { - result.operationResponseGetter = this.operationResponseGetter; - } - return result; - } -} - -// Copyright (c) Microsoft Corporation. -/** - * A class that handles the query portion of a URLBuilder. - */ -class URLQuery { - constructor() { - this._rawQuery = {}; - } - /** - * Get whether or not there any query parameters in this URLQuery. - */ - any() { - return Object.keys(this._rawQuery).length > 0; - } - /** - * Get the keys of the query string. - */ - keys() { - return Object.keys(this._rawQuery); - } - /** - * Set a query parameter with the provided name and value. If the parameterValue is undefined or - * empty, then this will attempt to remove an existing query parameter with the provided - * parameterName. - */ - set(parameterName, parameterValue) { - const caseParameterValue = parameterValue; - if (parameterName) { - if (caseParameterValue !== undefined && caseParameterValue !== null) { - const newValue = Array.isArray(caseParameterValue) - ? caseParameterValue - : caseParameterValue.toString(); - this._rawQuery[parameterName] = newValue; - } - else { - delete this._rawQuery[parameterName]; - } - } - } - /** - * Get the value of the query parameter with the provided name. If no parameter exists with the - * provided parameter name, then undefined will be returned. - */ - get(parameterName) { - return parameterName ? 
this._rawQuery[parameterName] : undefined; - } - /** - * Get the string representation of this query. The return value will not start with a "?". - */ - toString() { - let result = ""; - for (const parameterName in this._rawQuery) { - if (result) { - result += "&"; - } - const parameterValue = this._rawQuery[parameterName]; - if (Array.isArray(parameterValue)) { - const parameterStrings = []; - for (const parameterValueElement of parameterValue) { - parameterStrings.push(`${parameterName}=${parameterValueElement}`); - } - result += parameterStrings.join("&"); - } - else { - result += `${parameterName}=${parameterValue}`; - } - } - return result; - } - /** - * Parse a URLQuery from the provided text. - */ - static parse(text) { - const result = new URLQuery(); - if (text) { - if (text.startsWith("?")) { - text = text.substring(1); - } - let currentState = "ParameterName"; - let parameterName = ""; - let parameterValue = ""; - for (let i = 0; i < text.length; ++i) { - const currentCharacter = text[i]; - switch (currentState) { - case "ParameterName": - switch (currentCharacter) { - case "=": - currentState = "ParameterValue"; - break; - case "&": - parameterName = ""; - parameterValue = ""; - break; - default: - parameterName += currentCharacter; - break; - } - break; - case "ParameterValue": - switch (currentCharacter) { - case "&": - result.set(parameterName, parameterValue); - parameterName = ""; - parameterValue = ""; - currentState = "ParameterName"; - break; - default: - parameterValue += currentCharacter; - break; - } - break; - default: - throw new Error("Unrecognized URLQuery parse state: " + currentState); - } - } - if (currentState === "ParameterValue") { - result.set(parameterName, parameterValue); - } - } - return result; - } -} -/** - * A class that handles creating, modifying, and parsing URLs. - */ -class URLBuilder { - /** - * Set the scheme/protocol for this URL. 
If the provided scheme contains other parts of a URL - * (such as a host, port, path, or query), those parts will be added to this URL as well. - */ - setScheme(scheme) { - if (!scheme) { - this._scheme = undefined; - } - else { - this.set(scheme, "SCHEME"); - } - } - /** - * Get the scheme that has been set in this URL. - */ - getScheme() { - return this._scheme; - } - /** - * Set the host for this URL. If the provided host contains other parts of a URL (such as a - * port, path, or query), those parts will be added to this URL as well. - */ - setHost(host) { - if (!host) { - this._host = undefined; - } - else { - this.set(host, "SCHEME_OR_HOST"); - } - } - /** - * Get the host that has been set in this URL. - */ - getHost() { - return this._host; - } - /** - * Set the port for this URL. If the provided port contains other parts of a URL (such as a - * path or query), those parts will be added to this URL as well. - */ - setPort(port) { - if (port === undefined || port === null || port === "") { - this._port = undefined; - } - else { - this.set(port.toString(), "PORT"); - } - } - /** - * Get the port that has been set in this URL. - */ - getPort() { - return this._port; - } - /** - * Set the path for this URL. If the provided path contains a query, then it will be added to - * this URL as well. - */ - setPath(path) { - if (!path) { - this._path = undefined; - } - else { - const schemeIndex = path.indexOf("://"); - if (schemeIndex !== -1) { - const schemeStart = path.lastIndexOf("/", schemeIndex); - // Make sure to only grab the URL part of the path before setting the state back to SCHEME - // this will handle cases such as "/a/b/c/https://microsoft.com" => "https://microsoft.com" - this.set(schemeStart === -1 ? path : path.substr(schemeStart + 1), "SCHEME"); - } - else { - this.set(path, "PATH"); - } - } - } - /** - * Append the provided path to this URL's existing path. If the provided path contains a query, - * then it will be added to this URL as well. 
- */ - appendPath(path) { - if (path) { - let currentPath = this.getPath(); - if (currentPath) { - if (!currentPath.endsWith("/")) { - currentPath += "/"; - } - if (path.startsWith("/")) { - path = path.substring(1); - } - path = currentPath + path; - } - this.set(path, "PATH"); - } - } - /** - * Get the path that has been set in this URL. - */ - getPath() { - return this._path; - } - /** - * Set the query in this URL. - */ - setQuery(query) { - if (!query) { - this._query = undefined; - } - else { - this._query = URLQuery.parse(query); - } - } - /** - * Set a query parameter with the provided name and value in this URL's query. If the provided - * query parameter value is undefined or empty, then the query parameter will be removed if it - * existed. - */ - setQueryParameter(queryParameterName, queryParameterValue) { - if (queryParameterName) { - if (!this._query) { - this._query = new URLQuery(); - } - this._query.set(queryParameterName, queryParameterValue); - } - } - /** - * Get the value of the query parameter with the provided query parameter name. If no query - * parameter exists with the provided name, then undefined will be returned. - */ - getQueryParameterValue(queryParameterName) { - return this._query ? this._query.get(queryParameterName) : undefined; - } - /** - * Get the query in this URL. - */ - getQuery() { - return this._query ? this._query.toString() : undefined; - } - /** - * Set the parts of this URL by parsing the provided text using the provided startState. 
- */ - set(text, startState) { - const tokenizer = new URLTokenizer(text, startState); - while (tokenizer.next()) { - const token = tokenizer.current(); - let tokenPath; - if (token) { - switch (token.type) { - case "SCHEME": - this._scheme = token.text || undefined; - break; - case "HOST": - this._host = token.text || undefined; - break; - case "PORT": - this._port = token.text || undefined; - break; - case "PATH": - tokenPath = token.text || undefined; - if (!this._path || this._path === "/" || tokenPath !== "/") { - this._path = tokenPath; - } - break; - case "QUERY": - this._query = URLQuery.parse(token.text); - break; - default: - throw new Error(`Unrecognized URLTokenType: ${token.type}`); - } - } - } - } - /** - * Serializes the URL as a string. - * @returns the URL as a string. - */ - toString() { - let result = ""; - if (this._scheme) { - result += `${this._scheme}://`; - } - if (this._host) { - result += this._host; - } - if (this._port) { - result += `:${this._port}`; - } - if (this._path) { - if (!this._path.startsWith("/")) { - result += "/"; - } - result += this._path; - } - if (this._query && this._query.any()) { - result += `?${this._query.toString()}`; - } - return result; - } - /** - * If the provided searchValue is found in this URLBuilder, then replace it with the provided - * replaceValue. - */ - replaceAll(searchValue, replaceValue) { - if (searchValue) { - this.setScheme(replaceAll(this.getScheme(), searchValue, replaceValue)); - this.setHost(replaceAll(this.getHost(), searchValue, replaceValue)); - this.setPort(replaceAll(this.getPort(), searchValue, replaceValue)); - this.setPath(replaceAll(this.getPath(), searchValue, replaceValue)); - this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); - } - } - /** - * Parses a given string URL into a new {@link URLBuilder}. 
- */ - static parse(text) { - const result = new URLBuilder(); - result.set(text, "SCHEME_OR_HOST"); - return result; - } -} -class URLToken { - constructor(text, type) { - this.text = text; - this.type = type; - } - static scheme(text) { - return new URLToken(text, "SCHEME"); - } - static host(text) { - return new URLToken(text, "HOST"); - } - static port(text) { - return new URLToken(text, "PORT"); - } - static path(text) { - return new URLToken(text, "PATH"); - } - static query(text) { - return new URLToken(text, "QUERY"); - } -} -/** - * Get whether or not the provided character (single character string) is an alphanumeric (letter or - * digit) character. - */ -function isAlphaNumericCharacter(character) { - const characterCode = character.charCodeAt(0); - return ((48 /* '0' */ <= characterCode && characterCode <= 57) /* '9' */ || - (65 /* 'A' */ <= characterCode && characterCode <= 90) /* 'Z' */ || - (97 /* 'a' */ <= characterCode && characterCode <= 122) /* 'z' */); -} -/** - * A class that tokenizes URL strings. - */ -class URLTokenizer { - constructor(_text, state) { - this._text = _text; - this._textLength = _text ? _text.length : 0; - this._currentState = state !== undefined && state !== null ? state : "SCHEME_OR_HOST"; - this._currentIndex = 0; - } - /** - * Get the current URLToken this URLTokenizer is pointing at, or undefined if the URLTokenizer - * hasn't started or has finished tokenizing. - */ - current() { - return this._currentToken; - } - /** - * Advance to the next URLToken and return whether or not a URLToken was found. 
- */ - next() { - if (!hasCurrentCharacter(this)) { - this._currentToken = undefined; - } - else { - switch (this._currentState) { - case "SCHEME": - nextScheme(this); - break; - case "SCHEME_OR_HOST": - nextSchemeOrHost(this); - break; - case "HOST": - nextHost(this); - break; - case "PORT": - nextPort(this); - break; - case "PATH": - nextPath(this); - break; - case "QUERY": - nextQuery(this); - break; - default: - throw new Error(`Unrecognized URLTokenizerState: ${this._currentState}`); - } - } - return !!this._currentToken; - } -} -/** - * Read the remaining characters from this Tokenizer's character stream. - */ -function readRemaining(tokenizer) { - let result = ""; - if (tokenizer._currentIndex < tokenizer._textLength) { - result = tokenizer._text.substring(tokenizer._currentIndex); - tokenizer._currentIndex = tokenizer._textLength; - } - return result; -} -/** - * Whether or not this URLTokenizer has a current character. - */ -function hasCurrentCharacter(tokenizer) { - return tokenizer._currentIndex < tokenizer._textLength; -} -/** - * Get the character in the text string at the current index. - */ -function getCurrentCharacter(tokenizer) { - return tokenizer._text[tokenizer._currentIndex]; -} -/** - * Advance to the character in text that is "step" characters ahead. If no step value is provided, - * then step will default to 1. - */ -function nextCharacter(tokenizer, step) { - if (hasCurrentCharacter(tokenizer)) { - if (!step) { - step = 1; - } - tokenizer._currentIndex += step; - } -} -/** - * Starting with the current character, peek "charactersToPeek" number of characters ahead in this - * Tokenizer's stream of characters. 
- */ -function peekCharacters(tokenizer, charactersToPeek) { - let endIndex = tokenizer._currentIndex + charactersToPeek; - if (tokenizer._textLength < endIndex) { - endIndex = tokenizer._textLength; - } - return tokenizer._text.substring(tokenizer._currentIndex, endIndex); -} -/** - * Read characters from this Tokenizer until the end of the stream or until the provided condition - * is false when provided the current character. - */ -function readWhile(tokenizer, condition) { - let result = ""; - while (hasCurrentCharacter(tokenizer)) { - const currentCharacter = getCurrentCharacter(tokenizer); - if (!condition(currentCharacter)) { - break; - } - else { - result += currentCharacter; - nextCharacter(tokenizer); - } - } - return result; -} -/** - * Read characters from this Tokenizer until a non-alphanumeric character or the end of the - * character stream is reached. - */ -function readWhileLetterOrDigit(tokenizer) { - return readWhile(tokenizer, (character) => isAlphaNumericCharacter(character)); -} -/** - * Read characters from this Tokenizer until one of the provided terminating characters is read or - * the end of the character stream is reached. 
- */ -function readUntilCharacter(tokenizer, ...terminatingCharacters) { - return readWhile(tokenizer, (character) => terminatingCharacters.indexOf(character) === -1); -} -function nextScheme(tokenizer) { - const scheme = readWhileLetterOrDigit(tokenizer); - tokenizer._currentToken = URLToken.scheme(scheme); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "HOST"; - } -} -function nextSchemeOrHost(tokenizer) { - const schemeOrHost = readUntilCharacter(tokenizer, ":", "/", "?"); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - if (peekCharacters(tokenizer, 3) === "://") { - tokenizer._currentToken = URLToken.scheme(schemeOrHost); - tokenizer._currentState = "HOST"; - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - tokenizer._currentState = "PORT"; - } - } - else { - tokenizer._currentToken = URLToken.host(schemeOrHost); - if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } - } -} -function nextHost(tokenizer) { - if (peekCharacters(tokenizer, 3) === "://") { - nextCharacter(tokenizer, 3); - } - const host = readUntilCharacter(tokenizer, ":", "/", "?"); - tokenizer._currentToken = URLToken.host(host); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === ":") { - tokenizer._currentState = "PORT"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPort(tokenizer) { - if (getCurrentCharacter(tokenizer) === ":") { - nextCharacter(tokenizer); - } - const port = readUntilCharacter(tokenizer, "/", "?"); - tokenizer._currentToken = URLToken.port(port); - if 
(!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else if (getCurrentCharacter(tokenizer) === "/") { - tokenizer._currentState = "PATH"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextPath(tokenizer) { - const path = readUntilCharacter(tokenizer, "?"); - tokenizer._currentToken = URLToken.path(path); - if (!hasCurrentCharacter(tokenizer)) { - tokenizer._currentState = "DONE"; - } - else { - tokenizer._currentState = "QUERY"; - } -} -function nextQuery(tokenizer) { - if (getCurrentCharacter(tokenizer) === "?") { - nextCharacter(tokenizer); - } - const query = readRemaining(tokenizer); - tokenizer._currentToken = URLToken.query(query); - tokenizer._currentState = "DONE"; -} - -// Copyright (c) Microsoft Corporation. -function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {}, - }, - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; - } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; -} -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if 
(isRequestHttps && isProxyHttps) { - return tunnel__namespace.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel__namespace.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel__namespace.httpOverHttps(tunnelOptions); - } - else { - return tunnel__namespace.httpOverHttp(tunnelOptions); - } -} -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} - -// Copyright (c) Microsoft Corporation. -const RedactedString = "REDACTED"; -const defaultAllowedHeaderNames = [ - "x-ms-client-request-id", - "x-ms-return-client-request-id", - "x-ms-useragent", - "x-ms-correlation-request-id", - "x-ms-request-id", - "client-request-id", - "ms-cv", - "return-client-request-id", - "traceparent", - "Access-Control-Allow-Credentials", - "Access-Control-Allow-Headers", - "Access-Control-Allow-Methods", - "Access-Control-Allow-Origin", - "Access-Control-Expose-Headers", - "Access-Control-Max-Age", - "Access-Control-Request-Headers", - "Access-Control-Request-Method", - "Origin", - "Accept", - "Accept-Encoding", - "Cache-Control", - "Connection", - "Content-Length", - "Content-Type", - "Date", - "ETag", - "Expires", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "Last-Modified", - "Pragma", - "Request-Id", - "Retry-After", - "Server", - "Transfer-Encoding", - "User-Agent", - "WWW-Authenticate", -]; -const defaultAllowedQueryParameters = ["api-version"]; -class Sanitizer { - constructor({ allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - allowedHeaderNames = Array.isArray(allowedHeaderNames) - ? defaultAllowedHeaderNames.concat(allowedHeaderNames) - : defaultAllowedHeaderNames; - allowedQueryParameters = Array.isArray(allowedQueryParameters) - ? 
defaultAllowedQueryParameters.concat(allowedQueryParameters) - : defaultAllowedQueryParameters; - this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); - this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); - } - sanitize(obj) { - const seen = new Set(); - return JSON.stringify(obj, (key, value) => { - // Ensure Errors include their interesting non-enumerable members - if (value instanceof Error) { - return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); - } - if (key === "_headersMap") { - return this.sanitizeHeaders(value); - } - else if (key === "url") { - return this.sanitizeUrl(value); - } - else if (key === "query") { - return this.sanitizeQuery(value); - } - else if (key === "body") { - // Don't log the request body - return undefined; - } - else if (key === "response") { - // Don't log response again - return undefined; - } - else if (key === "operationSpec") { - // When using sendOperationRequest, the request carries a massive - // field with the autorest spec. No need to log it. 
- return undefined; - } - else if (Array.isArray(value) || isObject(value)) { - if (seen.has(value)) { - return "[Circular]"; - } - seen.add(value); - } - return value; - }, 2); - } - sanitizeHeaders(value) { - return this.sanitizeObject(value, this.allowedHeaderNames, (v, k) => v[k].value); - } - sanitizeQuery(value) { - return this.sanitizeObject(value, this.allowedQueryParameters, (v, k) => v[k]); - } - sanitizeObject(value, allowedKeys, accessor) { - if (typeof value !== "object" || value === null) { - return value; - } - const sanitized = {}; - for (const k of Object.keys(value)) { - if (allowedKeys.has(k.toLowerCase())) { - sanitized[k] = accessor(value, k); - } - else { - sanitized[k] = RedactedString; - } - } - return sanitized; - } - sanitizeUrl(value) { - if (typeof value !== "string" || value === null) { - return value; - } - const urlBuilder = URLBuilder.parse(value); - const queryString = urlBuilder.getQuery(); - if (!queryString) { - return value; - } - const query = URLQuery.parse(queryString); - for (const k of query.keys()) { - if (!this.allowedQueryParameters.has(k.toLowerCase())) { - query.set(k, RedactedString); - } - } - urlBuilder.setQuery(query.toString()); - return urlBuilder.toString(); - } -} - -// Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - -// Copyright (c) Microsoft Corporation. -const errorSanitizer = new Sanitizer(); -/** - * An error resulting from an HTTP request to a service endpoint. 
- */ -class RestError extends Error { - constructor(message, code, statusCode, request, response) { - super(message); - this.name = "RestError"; - this.code = code; - this.statusCode = statusCode; - this.request = request; - this.response = response; - Object.setPrototypeOf(this, RestError.prototype); - } - /** - * Logging method for util.inspect in Node - */ - [custom]() { - return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; - } -} -/** - * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) - */ -RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; -/** - * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. - */ -RestError.PARSE_ERROR = "PARSE_ERROR"; - -// Copyright (c) Microsoft Corporation. -const logger = logger$1.createClientLogger("core-http"); - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -class ReportTransform extends stream.Transform { - constructor(progressCallback) { - super(); - this.progressCallback = progressCallback; - this.loadedBytes = 0; - } - _transform(chunk, _encoding, callback) { - this.push(chunk); - this.loadedBytes += chunk.length; - this.progressCallback({ loadedBytes: this.loadedBytes }); - callback(undefined); - } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? 
void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -/** - * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} - */ -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; -} -/** - * An HTTP client that uses `node-fetch`. - */ -class NodeFetchHttpClient { - constructor() { - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - } - /** - * Provides minimum viable error handling and the logic that executes the abstract methods. - * @param httpRequest - Object representing the outgoing HTTP request. - * @returns An object representing the incoming HTTP response. - */ - async sendRequest(httpRequest) { - var _a; - if (!httpRequest && typeof httpRequest !== "object") { - throw new Error("'httpRequest' (WebResourceLike) cannot be null or undefined and must be of type object."); - } - const abortController$1 = new abortController.AbortController(); - let abortListener; - if (httpRequest.abortSignal) { - if (httpRequest.abortSignal.aborted) { - throw new abortController.AbortError("The operation was aborted."); - } - abortListener = (event) => { - if (event.type === "abort") { - abortController$1.abort(); - } - }; - httpRequest.abortSignal.addEventListener("abort", abortListener); - } - if (httpRequest.timeout) { - setTimeout(() => { - abortController$1.abort(); - }, httpRequest.timeout); - } - if (httpRequest.formData) { - const formData = httpRequest.formData; - const requestForm = new FormData__default["default"](); - const appendFormValue = (key, value) => { - // value function probably returns a stream so we can provide a fresh stream on each retry - if (typeof value === "function") { - value = value(); - } - if (value && - 
Object.prototype.hasOwnProperty.call(value, "value") && - Object.prototype.hasOwnProperty.call(value, "options")) { - requestForm.append(key, value.value, value.options); - } - else { - requestForm.append(key, value); - } - }; - for (const formKey of Object.keys(formData)) { - const formValue = formData[formKey]; - if (Array.isArray(formValue)) { - for (let j = 0; j < formValue.length; j++) { - appendFormValue(formKey, formValue[j]); - } - } - else { - appendFormValue(formKey, formValue); - } - } - httpRequest.body = requestForm; - httpRequest.formData = undefined; - const contentType = httpRequest.headers.get("Content-Type"); - if (contentType && contentType.indexOf("multipart/form-data") !== -1) { - if (typeof requestForm.getBoundary === "function") { - httpRequest.headers.set("Content-Type", `multipart/form-data; boundary=${requestForm.getBoundary()}`); - } - else { - // browser will automatically apply a suitable content-type header - httpRequest.headers.remove("Content-Type"); - } - } - } - let body = httpRequest.body - ? typeof httpRequest.body === "function" - ? httpRequest.body() - : httpRequest.body - : undefined; - if (httpRequest.onUploadProgress && httpRequest.body) { - const onUploadProgress = httpRequest.onUploadProgress; - const uploadReportStream = new ReportTransform(onUploadProgress); - if (isReadableStream(body)) { - body.pipe(uploadReportStream); - } - else { - uploadReportStream.end(body); - } - body = uploadReportStream; - } - const platformSpecificRequestInit = await this.prepareRequest(httpRequest); - const requestInit = Object.assign({ body: body, headers: httpRequest.headers.rawHeaders(), method: httpRequest.method, - // the types for RequestInit are from the browser, which expects AbortSignal to - // have `reason` and `throwIfAborted`, but these don't exist on our polyfill - // for Node. 
- signal: abortController$1.signal, redirect: "manual" }, platformSpecificRequestInit); - let operationResponse; - try { - const response = await this.fetch(httpRequest.url, requestInit); - const headers = parseHeaders(response.headers); - const streaming = ((_a = httpRequest.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(response.status)) || - httpRequest.streamResponseBody; - operationResponse = { - headers: headers, - request: httpRequest, - status: response.status, - readableStreamBody: streaming - ? response.body - : undefined, - bodyAsText: !streaming ? await response.text() : undefined, - }; - const onDownloadProgress = httpRequest.onDownloadProgress; - if (onDownloadProgress) { - const responseBody = response.body || undefined; - if (isReadableStream(responseBody)) { - const downloadReportStream = new ReportTransform(onDownloadProgress); - responseBody.pipe(downloadReportStream); - operationResponse.readableStreamBody = downloadReportStream; - } - else { - const length = parseInt(headers.get("Content-Length")) || undefined; - if (length) { - // Calling callback for non-stream response for consistency with browser - onDownloadProgress({ loadedBytes: length }); - } - } - } - await this.processRequest(operationResponse); - return operationResponse; - } - catch (error) { - const fetchError = error; - if (fetchError.code === "ENOTFOUND") { - throw new RestError(fetchError.message, RestError.REQUEST_SEND_ERROR, undefined, httpRequest); - } - else if (fetchError.type === "aborted") { - throw new abortController.AbortError("The operation was aborted."); - } - throw fetchError; - } - finally { - // clean up event listener - if (httpRequest.abortSignal && abortListener) { - let uploadStreamDone = Promise.resolve(); - if (isReadableStream(body)) { - uploadStreamDone = isStreamComplete(body); - } - let downloadStreamDone = Promise.resolve(); - if (isReadableStream(operationResponse === null || operationResponse === void 0 ? 
void 0 : operationResponse.readableStreamBody)) { - downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1); - } - Promise.all([uploadStreamDone, downloadStreamDone]) - .then(() => { - var _a; - (_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); - return; - }) - .catch((e) => { - logger.warning("Error when cleaning up abortListener on httpRequest", e); - }); - } - } - } - getOrCreateAgent(httpRequest) { - var _a; - const isHttps = isUrlHttps(httpRequest.url); - // At the moment, proxy settings and keepAlive are mutually - // exclusive because the 'tunnel' library currently lacks the - // ability to create a proxy with keepAlive turned on. - if (httpRequest.proxySettings) { - const { host, port, username, password } = httpRequest.proxySettings; - const key = `${host}:${port}:${username}:${password}`; - const proxyAgents = (_a = this.proxyAgentMap.get(key)) !== null && _a !== void 0 ? _a : {}; - let agent = getCachedAgent(isHttps, proxyAgents); - if (agent) { - return agent; - } - const tunnel = createProxyAgent(httpRequest.url, httpRequest.proxySettings, httpRequest.headers); - agent = tunnel.agent; - if (tunnel.isHttps) { - proxyAgents.httpsAgent = tunnel.agent; - } - else { - proxyAgents.httpAgent = tunnel.agent; - } - this.proxyAgentMap.set(key, proxyAgents); - return agent; - } - else if (httpRequest.keepAlive) { - let agent = getCachedAgent(isHttps, this.keepAliveAgents); - if (agent) { - return agent; - } - const agentOptions = { - keepAlive: httpRequest.keepAlive, - }; - if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); - } - else { - agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); - } - return agent; - } - else { - return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; - } - } - /** - * Uses `node-fetch` to perform the request. 
- */ - // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs - async fetch(input, init) { - return node_fetch__default["default"](input, init); - } - /** - * Prepares a request based on the provided web resource. - */ - async prepareRequest(httpRequest) { - const requestInit = {}; - // Set the http(s) agent - requestInit.agent = this.getOrCreateAgent(httpRequest); - requestInit.compress = httpRequest.decompressResponse; - return requestInit; - } - /** - * Process an HTTP response. - */ - async processRequest(_operationResponse) { - /* no_op */ - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The different levels of logs that can be used with the HttpPipelineLogger. - */ -exports.HttpPipelineLogLevel = void 0; -(function (HttpPipelineLogLevel) { - /** - * A log level that indicates that no logs will be logged. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; - /** - * An error log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; - /** - * A warning log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; - /** - * An information log. - */ - HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; -})(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); - -// Copyright (c) Microsoft Corporation. 
-/** - * Converts an OperationOptions to a RequestOptionsBase - * - * @param opts - OperationOptions object to convert to RequestOptionsBase - */ -function operationOptionsToRequestOptionsBase(opts) { - const { requestOptions, tracingOptions } = opts, additionalOptions = tslib.__rest(opts, ["requestOptions", "tracingOptions"]); - let result = additionalOptions; - if (requestOptions) { - result = Object.assign(Object.assign({}, result), requestOptions); - } - if (tracingOptions) { - result.tracingContext = tracingOptions.tracingContext; - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - result.spanOptions = tracingOptions === null || tracingOptions === void 0 ? void 0 : tracingOptions.spanOptions; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * The base class from which all request policies derive. - */ -class BaseRequestPolicy { - /** - * The main method to implement that manipulates a request/response. - */ - constructor( - /** - * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. - */ - _nextPolicy, - /** - * The options that can be passed to a given request policy. - */ - _options) { - this._nextPolicy = _nextPolicy; - this._options = _options; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return this._options.shouldLog(logLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meat the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. 
- */ - log(logLevel, message) { - this._options.log(logLevel, message); - } -} -/** - * Optional properties that can be used when creating a RequestPolicy. - */ -class RequestPolicyOptions { - constructor(_logger) { - this._logger = _logger; - } - /** - * Get whether or not a log with the provided log level should be logged. - * @param logLevel - The log level of the log that will be logged. - * @returns Whether or not a log with the provided log level should be logged. - */ - shouldLog(logLevel) { - return (!!this._logger && - logLevel !== exports.HttpPipelineLogLevel.OFF && - logLevel <= this._logger.minimumLogLevel); - } - /** - * Attempt to log the provided message to the provided logger. If no logger was provided or if - * the log level does not meet the logger's threshold, then nothing will be logged. - * @param logLevel - The log level of this log. - * @param message - The message of this log. - */ - log(logLevel, message) { - if (this._logger && this.shouldLog(logLevel)) { - this._logger.log(logLevel, message); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed -// by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 -// By creating a new copy of the settings each time we instantiate the parser, -// we are safeguarding against the possibility of the default settings being mutated elsewhere unintentionally. 
-const xml2jsDefaultOptionsV2 = { - explicitCharkey: false, - trim: false, - normalize: false, - normalizeTags: false, - attrkey: XML_ATTRKEY, - explicitArray: true, - ignoreAttrs: false, - mergeAttrs: false, - explicitRoot: true, - validator: undefined, - xmlns: false, - explicitChildren: false, - preserveChildrenOrder: false, - childkey: "$$", - charsAsChildren: false, - includeWhiteChars: false, - async: false, - strict: true, - attrNameProcessors: undefined, - attrValueProcessors: undefined, - tagNameProcessors: undefined, - valueProcessors: undefined, - rootName: "root", - xmldec: { - version: "1.0", - encoding: "UTF-8", - standalone: true, - }, - doctype: undefined, - renderOpts: { - pretty: true, - indent: " ", - newline: "\n", - }, - headless: false, - chunkSize: 10000, - emptyTag: "", - cdata: false, -}; -// The xml2js settings for general XML parsing operations. -const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsParserSettings.explicitArray = false; -// The xml2js settings for general XML building operations. -const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); -xml2jsBuilderSettings.explicitArray = false; -xml2jsBuilderSettings.renderOpts = { - pretty: false, -}; -/** - * Converts given JSON object to XML string - * @param obj - JSON object to be converted into XML string - * @param opts - Options that govern the parsing of given JSON object - */ -function stringifyXML(obj, opts = {}) { - var _a; - xml2jsBuilderSettings.rootName = opts.rootName; - xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); - return builder.buildObject(obj); -} -/** - * Converts given XML string into JSON - * @param str - String containing the XML content to be parsed into JSON - * @param opts - Options that govern the parsing of given xml string - */ -function parseXML(str, opts = {}) { - var _a; - xml2jsParserSettings.explicitRoot = !!opts.includeRoot; - xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); - return new Promise((resolve, reject) => { - if (!str) { - reject(new Error("Document is empty")); - } - else { - xmlParser.parseString(str, (err, res) => { - if (err) { - reject(err); - } - else { - resolve(res); - } - }); - } - }); -} - -// Copyright (c) Microsoft Corporation. -/** - * Create a new serialization RequestPolicyCreator that will serialized HTTP request bodies as they - * pass through the HTTP pipeline. - */ -function deserializationPolicy(deserializationContentTypes, parsingOptions) { - return { - create: (nextPolicy, options) => { - return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - }, - }; -} -const defaultJsonContentTypes = ["application/json", "text/json"]; -const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; -const DefaultDeserializationOptions = { - expectedContentTypes: { - json: defaultJsonContentTypes, - xml: defaultXmlContentTypes, - }, -}; -/** - * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the - * HTTP pipeline. 
- */ -class DeserializationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, requestPolicyOptions, deserializationContentTypes, parsingOptions = {}) { - var _a; - super(nextPolicy, requestPolicyOptions); - this.jsonContentTypes = - (deserializationContentTypes && deserializationContentTypes.json) || defaultJsonContentTypes; - this.xmlContentTypes = - (deserializationContentTypes && deserializationContentTypes.xml) || defaultXmlContentTypes; - this.xmlCharKey = (_a = parsingOptions.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - } - async sendRequest(request) { - return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey, - })); - } -} -function getOperationResponse(parsedResponse) { - let result; - const request = parsedResponse.request; - const operationSpec = request.operationSpec; - if (operationSpec) { - const operationResponseGetter = request.operationResponseGetter; - if (!operationResponseGetter) { - result = operationSpec.responses[parsedResponse.status]; - } - else { - result = operationResponseGetter(operationSpec, parsedResponse); - } - } - return result; -} -function shouldDeserializeResponse(parsedResponse) { - const shouldDeserialize = parsedResponse.request.shouldDeserialize; - let result; - if (shouldDeserialize === undefined) { - result = true; - } - else if (typeof shouldDeserialize === "boolean") { - result = shouldDeserialize; - } - else { - result = shouldDeserialize(parsedResponse); - } - return result; -} -/** - * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. - * @param jsonContentTypes - Response content types to parse the body as JSON. - * @param xmlContentTypes - Response content types to parse the body as XML. - * @param response - HTTP Response from the pipeline. 
- * @param options - Options to the serializer, mostly for configuring the XML parser if needed. - * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. - */ -function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { - var _a, _b, _c; - const updatedOptions = { - rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", - includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, - }; - return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { - if (!shouldDeserializeResponse(parsedResponse)) { - return parsedResponse; - } - const operationSpec = parsedResponse.request.operationSpec; - if (!operationSpec || !operationSpec.responses) { - return parsedResponse; - } - const responseSpec = getOperationResponse(parsedResponse); - const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec); - if (error) { - throw error; - } - else if (shouldReturnResponse) { - return parsedResponse; - } - // An operation response spec does exist for current status code, so - // use it to deserialize the response. - if (responseSpec) { - if (responseSpec.bodyMapper) { - let valueToDeserialize = parsedResponse.parsedBody; - if (operationSpec.isXML && responseSpec.bodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof valueToDeserialize === "object" - ? 
valueToDeserialize[responseSpec.bodyMapper.xmlElementName] - : []; - } - try { - parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); - } - catch (innerError) { - const restError = new RestError(`Error ${innerError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - throw restError; - } - } - else if (operationSpec.httpMethod === "HEAD") { - // head methods never have a body, but we return a boolean to indicate presence/absence of the resource - parsedResponse.parsedBody = response.status >= 200 && response.status < 300; - } - if (responseSpec.headersMapper) { - parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders", options); - } - } - return parsedResponse; - }); -} -function isOperationSpecEmpty(operationSpec) { - const expectedStatusCodes = Object.keys(operationSpec.responses); - return (expectedStatusCodes.length === 0 || - (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); -} -function handleErrorResponse(parsedResponse, operationSpec, responseSpec) { - var _a; - const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; - const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) - ? isSuccessByStatus - : !!responseSpec; - if (isExpectedStatusCode) { - if (responseSpec) { - if (!responseSpec.isError) { - return { error: null, shouldReturnResponse: false }; - } - } - else { - return { error: null, shouldReturnResponse: false }; - } - } - const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; - const streaming = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? 
void 0 : _a.has(parsedResponse.status)) || - parsedResponse.request.streamResponseBody; - const initialErrorMessage = streaming - ? `Unexpected status code: ${parsedResponse.status}` - : parsedResponse.bodyAsText; - const error = new RestError(initialErrorMessage, undefined, parsedResponse.status, parsedResponse.request, parsedResponse); - // If the item failed but there's no error spec or default spec to deserialize the error, - // we should fail so we just throw the parsed response - if (!errorResponseSpec) { - throw error; - } - const defaultBodyMapper = errorResponseSpec.bodyMapper; - const defaultHeadersMapper = errorResponseSpec.headersMapper; - try { - // If error response has a body, try to deserialize it using default body mapper. - // Then try to extract error code & message from it - if (parsedResponse.parsedBody) { - const parsedBody = parsedResponse.parsedBody; - let parsedError; - if (defaultBodyMapper) { - let valueToDeserialize = parsedBody; - if (operationSpec.isXML && defaultBodyMapper.type.name === MapperType.Sequence) { - valueToDeserialize = - typeof parsedBody === "object" ? 
parsedBody[defaultBodyMapper.xmlElementName] : []; - } - parsedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody"); - } - const internalError = parsedBody.error || parsedError || parsedBody; - error.code = internalError.code; - if (internalError.message) { - error.message = internalError.message; - } - if (defaultBodyMapper) { - error.response.parsedBody = parsedError; - } - } - // If error response has headers, try to deserialize it using default header mapper - if (parsedResponse.headers && defaultHeadersMapper) { - error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJson(), "operationRes.parsedHeaders"); - } - } - catch (defaultError) { - error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; - } - return { error, shouldReturnResponse: false }; -} -function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { - var _a; - const errorHandler = (err) => { - const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; - const errCode = err.code || RestError.PARSE_ERROR; - const e = new RestError(msg, errCode, operationResponse.status, operationResponse.request, operationResponse); - return Promise.reject(e); - }; - const streaming = ((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) || - operationResponse.request.streamResponseBody; - if (!streaming && operationResponse.bodyAsText) { - const text = operationResponse.bodyAsText; - const contentType = operationResponse.headers.get("Content-Type") || ""; - const contentComponents = !contentType - ? 
[] - : contentType.split(";").map((component) => component.toLowerCase()); - if (contentComponents.length === 0 || - contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { - return new Promise((resolve) => { - operationResponse.parsedBody = JSON.parse(text); - resolve(operationResponse); - }).catch(errorHandler); - } - else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { - return parseXML(text, opts) - .then((body) => { - operationResponse.parsedBody = body; - return operationResponse; - }) - .catch(errorHandler); - } - } - return Promise.resolve(operationResponse); -} - -// Copyright (c) Microsoft Corporation. -/** - * By default, HTTP connections are maintained for future requests. - */ -const DefaultKeepAliveOptions = { - enable: true, -}; -/** - * Creates a policy that controls whether HTTP connections are maintained on future requests. - * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. - * @returns An instance of the {@link KeepAlivePolicy} - */ -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - }, - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. 
-/** - * Methods that are allowed to follow redirects 301 and 302 - */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20, -}; -/** - * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. - * @param maximumRetries - Maximum number of redirects to follow. - * @returns An instance of the {@link RedirectPolicy} - */ -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); - }, - }; -} -/** - * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. - */ -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { - super(nextPolicy, options); - this.maxRetries = maxRetries; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; - } - return policy._nextPolicy - .sendRequest(request) - 
.then((res) => handleRedirect(policy, res, currentRetries + 1)); - } - return Promise.resolve(response); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const DEFAULT_CLIENT_RETRY_COUNT = 3; -// intervals are in ms -const DEFAULT_CLIENT_RETRY_INTERVAL = 1000 * 30; -const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 90; -const DEFAULT_CLIENT_MIN_RETRY_INTERVAL = 1000 * 3; -function isNumber(n) { - return typeof n === "number"; -} -/** - * @internal - * Determines if the operation should be retried. - * - * @param retryLimit - Specifies the max number of retries. - * @param predicate - Initial chekck on whether to retry based on given responses or errors - * @param retryData - The retry data. - * @returns True if the operation qualifies for a retry; false otherwise. - */ -function shouldRetry(retryLimit, predicate, retryData, response, error) { - if (!predicate(response, error)) { - return false; - } - return retryData.retryCount < retryLimit; -} -/** - * @internal - * Updates the retry data for the next attempt. - * - * @param retryOptions - specifies retry interval, and its lower bound and upper bound. - * @param retryData - The retry data. - * @param err - The operation"s error, if any. - */ -function updateRetryData(retryOptions, retryData = { retryCount: 0, retryInterval: 0 }, err) { - if (err) { - if (retryData.error) { - err.innerError = retryData.error; - } - retryData.error = err; - } - // Adjust retry count - retryData.retryCount++; - // Adjust retry interval - let incrementDelta = Math.pow(2, retryData.retryCount - 1) - 1; - const boundedRandDelta = retryOptions.retryInterval * 0.8 + - Math.floor(Math.random() * (retryOptions.retryInterval * 0.4)); - incrementDelta *= boundedRandDelta; - retryData.retryInterval = Math.min(retryOptions.minRetryInterval + incrementDelta, retryOptions.maxRetryInterval); - return retryData; -} - -// Copyright (c) Microsoft Corporation. 
-/** - * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. - * @param retryCount - Maximum number of retries. - * @param retryInterval - Base time between retries. - * @param maxRetryInterval - Maximum time to wait between retries. - */ -function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - }, - }; -} -/** - * Describes the Retry Mode type. Currently supporting only Exponential. - */ -exports.RetryMode = void 0; -(function (RetryMode) { - /** - * Currently supported retry mode. - * Each time a retry happens, it will take exponentially more time than the last time. - */ - RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; -})(exports.RetryMode || (exports.RetryMode = {})); -const DefaultRetryOptions = { - maxRetries: DEFAULT_CLIENT_RETRY_COUNT, - retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, -}; -/** - * Instantiates a new "ExponentialRetryPolicyFilter" instance. - */ -class ExponentialRetryPolicy extends BaseRequestPolicy { - /** - * @param nextPolicy - The next RequestPolicy in the pipeline chain. - * @param options - The options for this RequestPolicy. - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ - constructor(nextPolicy, options, retryCount, retryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? 
retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => retry$1(this, request, response)) - .catch((error) => retry$1(this, request, error.response, undefined, error)); - } -} -async function retry$1(policy, request, response, retryData, requestError) { - function shouldPolicyRetry(responseParam) { - const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; - if (statusCode === 503 && (response === null || response === void 0 ? void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { - return false; - } - if (statusCode === undefined || - (statusCode < 500 && statusCode !== 408) || - statusCode === 501 || - statusCode === 505) { - return false; - } - return true; - } - retryData = updateRetryData({ - retryInterval: policy.retryInterval, - minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval, - }, retryData, requestError); - const isAborted = request.abortSignal && request.abortSignal.aborted; - if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { - logger.info(`Retrying request in ${retryData.retryInterval}`); - try { - await coreUtil.delay(retryData.retryInterval); - const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry$1(policy, request, res, retryData); - } - catch (err) { - return retry$1(policy, request, response, retryData, err); - } - } - else if (isAborted || requestError || !response) { - // If the operation failed in the end, return all errors instead of just the last one - const err = retryData.error || - new RestError("Failed to send the request.", RestError.REQUEST_SEND_ERROR, response && response.status, response && response.request, response); - throw err; - } - else { - return response; - } -} - -// 
Copyright (c) Microsoft Corporation. -/** - * Creates a policy that logs information about the outgoing request and the incoming responses. - * @param loggingOptions - Logging options. - * @returns An instance of the {@link LogPolicy} - */ -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - }, - }; -} -/** - * A policy that logs information about the outgoing request and the incoming responses. - */ -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. 
- */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * Gets the list of status codes for streaming responses. 
- * @internal - */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - -// Copyright (c) Microsoft Corporation. -function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version, - }; - const osInfo = { - key: "OS", - value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, - }; - return [runtimeInfo, osInfo]; -} - -// Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion, - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -/** - * The default approach to generate user agents. - * Uses static information from this package, plus system information available from the runtime. - */ -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -/** - * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - * @param userAgentData - Telemetry information. - * @returns A new {@link UserAgentPolicy}. 
- */ -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; - return { - create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - }, - }; -} -/** - * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. - */ -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; - } - sendRequest(request) { - this.addUserAgentHeader(request); - return this._nextPolicy.sendRequest(request); - } - /** - * Adds the user agent header to the outgoing request. - */ - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * The format that will be used to join an array of values together for a query parameter value. - */ -exports.QueryCollectionFormat = void 0; -(function (QueryCollectionFormat) { - /** - * CSV: Each pair of segments joined by a single comma. - */ - QueryCollectionFormat["Csv"] = ","; - /** - * SSV: Each pair of segments joined by a single space character. - */ - QueryCollectionFormat["Ssv"] = " "; - /** - * TSV: Each pair of segments joined by a single tab character. - */ - QueryCollectionFormat["Tsv"] = "\t"; - /** - * Pipes: Each pair of segments joined by a single pipe character. 
- */ - QueryCollectionFormat["Pipes"] = "|"; - /** - * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` - */ - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. -// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry -}; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await coreUtil.delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. 
- * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - }, - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. 
- */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. 
- */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return { - create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - }, - }; -} - -// Copyright (c) Microsoft Corporation. -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. - */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - }, - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. 
- * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a policy that assigns a unique request id to outgoing requests. - * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. - */ -function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { - return { - create: (nextPolicy, options) => { - return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); - }, - }; -} -class GenerateClientRequestIdPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _requestIdHeaderName) { - super(nextPolicy, options); - this._requestIdHeaderName = _requestIdHeaderName; - } - sendRequest(request) { - if (!request.headers.contains(this._requestIdHeaderName)) { - request.headers.set(this._requestIdHeaderName, request.requestId); - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} - -// Copyright (c) Microsoft Corporation. -function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); - }, - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. 
- */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. */ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -/** - * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. - * @param proxyUrl - URL of the proxy - * @returns The default proxy settings, or undefined. - */ -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password, - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. 
- * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - }, - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth, - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. 
-function rpRegistrationPolicy(retryTimeout = 30) { - return { - create: (nextPolicy, options) => { - return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - }, - }; -} -class RPRegistrationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _retryTimeout = 30) { - super(nextPolicy, options); - this._retryTimeout = _retryTimeout; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .then((response) => registerIfNeeded(this, request, response)); - } -} -function registerIfNeeded(policy, request, response) { - if (response.status === 409) { - const rpName = checkRPNotRegisteredError(response.bodyAsText); - if (rpName) { - const urlPrefix = extractSubscriptionUrl(request.url); - return (registerRP(policy, urlPrefix, rpName, request) - // Autoregistration of ${provider} failed for some reason. We will not return this error - // instead will return the initial response with 409 status code back to the user. - // do nothing here as we are returning the original response at the end of this method. - .catch(() => false) - .then((registrationStatus) => { - if (registrationStatus) { - // Retry the original request. We have to change the x-ms-client-request-id - // otherwise Azure endpoint will return the initial 409 (cached) response. - request.headers.set("x-ms-client-request-id", generateUuid()); - return policy._nextPolicy.sendRequest(request.clone()); - } - return response; - })); - } - } - return Promise.resolve(response); -} -/** - * Reuses the headers of the original request and url (if specified). - * @param originalRequest - The original request - * @param reuseUrlToo - Should the url from the original request be reused as well. Default false. - * @returns A new request object with desired headers. 
- */ -function getRequestEssentials(originalRequest, reuseUrlToo = false) { - const reqOptions = originalRequest.clone(); - if (reuseUrlToo) { - reqOptions.url = originalRequest.url; - } - // We have to change the x-ms-client-request-id otherwise Azure endpoint - // will return the initial 409 (cached) response. - reqOptions.headers.set("x-ms-client-request-id", generateUuid()); - // Set content-type to application/json - reqOptions.headers.set("Content-Type", "application/json; charset=utf-8"); - return reqOptions; -} -/** - * Validates the error code and message associated with 409 response status code. If it matches to that of - * RP not registered then it returns the name of the RP else returns undefined. - * @param body - The response body received after making the original request. - * @returns The name of the RP if condition is satisfied else undefined. - */ -function checkRPNotRegisteredError(body) { - let result, responseBody; - if (body) { - try { - responseBody = JSON.parse(body); - } - catch (err) { - // do nothing; - } - if (responseBody && - responseBody.error && - responseBody.error.message && - responseBody.error.code && - responseBody.error.code === "MissingSubscriptionRegistration") { - const matchRes = responseBody.error.message.match(/.*'(.*)'/i); - if (matchRes) { - result = matchRes.pop(); - } - } - } - return result; -} -/** - * Extracts the first part of the URL, just after subscription: - * https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param url - The original request url - * @returns The url prefix as explained above. - */ -function extractSubscriptionUrl(url) { - let result; - const matchRes = url.match(/.*\/subscriptions\/[a-f0-9-]+\//gi); - if (matchRes && matchRes[0]) { - result = matchRes[0]; - } - else { - throw new Error(`Unable to extract subscriptionId from the given url - ${url}.`); - } - return result; -} -/** - * Registers the given provider. 
- * @param policy - The RPRegistrationPolicy this function is being called against. - * @param urlPrefix - https://management.azure.com/subscriptions/00000000-0000-0000-0000-000000000000/ - * @param provider - The provider name to be registered. - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - */ -async function registerRP(policy, urlPrefix, provider, originalRequest) { - const postUrl = `${urlPrefix}providers/${provider}/register?api-version=2016-02-01`; - const getUrl = `${urlPrefix}providers/${provider}?api-version=2016-02-01`; - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.method = "POST"; - reqOptions.url = postUrl; - const response = await policy._nextPolicy.sendRequest(reqOptions); - if (response.status !== 200) { - throw new Error(`Autoregistration of ${provider} failed. Please try registering manually.`); - } - return getRegistrationStatus(policy, getUrl, originalRequest); -} -/** - * Polls the registration status of the provider that was registered. Polling happens at an interval of 30 seconds. - * Polling will happen till the registrationState property of the response body is "Registered". - * @param policy - The RPRegistrationPolicy this function is being called against. - * @param url - The request url for polling - * @param originalRequest - The original request sent by the user that returned a 409 response - * with a message that the provider is not registered. - * @returns True if RP Registration is successful. 
- */ -async function getRegistrationStatus(policy, url, originalRequest) { - const reqOptions = getRequestEssentials(originalRequest); - reqOptions.url = url; - reqOptions.method = "GET"; - const res = await policy._nextPolicy.sendRequest(reqOptions); - const obj = res.parsedBody; - if (res.parsedBody && obj.registrationState && obj.registrationState === "Registered") { - return true; - } - else { - await coreUtil.delay(policy._retryTimeout * 1000); - return getRegistrationStatus(policy, url, originalRequest); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. - * @param authenticationProvider - The authentication provider. - * @returns An instance of the {@link SigningPolicy}. - */ -function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - }, - }; -} -/** - * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. - */ -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - Maximum number of retries. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. 
- * @param maxRetryInterval - The maximum retry interval, in milliseconds. - * @returns An instance of the {@link SystemErrorRetryPolicy} - */ -function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - return { - create: (nextPolicy, options) => { - return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - }, - }; -} -/** - * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". - * @param retryCount - The client retry count. - * @param retryInterval - The client retry interval, in milliseconds. - * @param minRetryInterval - The minimum retry interval, in milliseconds. - * @param maxRetryInterval - The maximum retry interval, in milliseconds. - */ -class SystemErrorRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval) { - super(nextPolicy, options); - this.retryCount = isNumber(retryCount) ? retryCount : DEFAULT_CLIENT_RETRY_COUNT; - this.retryInterval = isNumber(retryInterval) ? retryInterval : DEFAULT_CLIENT_RETRY_INTERVAL; - this.minRetryInterval = isNumber(minRetryInterval) - ? minRetryInterval - : DEFAULT_CLIENT_MIN_RETRY_INTERVAL; - this.maxRetryInterval = isNumber(maxRetryInterval) - ? 
maxRetryInterval - : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; - } - sendRequest(request) { - return this._nextPolicy - .sendRequest(request.clone()) - .catch((error) => retry(this, request, error.response, error)); - } -} -async function retry(policy, request, operationResponse, err, retryData) { - retryData = updateRetryData(policy, retryData, err); - function shouldPolicyRetry(_response, error) { - if (error && - error.code && - (error.code === "ETIMEDOUT" || - error.code === "ESOCKETTIMEDOUT" || - error.code === "ECONNREFUSED" || - error.code === "ECONNRESET" || - error.code === "ENOENT")) { - return true; - } - return false; - } - if (shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, operationResponse, err)) { - // If previous operation ended with an error and the policy allows a retry, do that - try { - await coreUtil.delay(retryData.retryInterval); - return policy._nextPolicy.sendRequest(request.clone()); - } - catch (nestedErr) { - return retry(policy, request, operationResponse, nestedErr, retryData); - } - } - else { - if (err) { - // If the operation failed in the end, return all errors instead of just the last one - return Promise.reject(retryData.error); - } - return operationResponse; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Maximum number of retries for the throttling retry policy - */ -const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; - -// Copyright (c) Microsoft Corporation. -const StatusCodes = Constants.HttpConstants.StatusCodes; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. 
- * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - * @returns - */ -function throttlingRetryPolicy() { - return { - create: (nextPolicy, options) => { - return new ThrottlingRetryPolicy(nextPolicy, options); - }, - }; -} -const StandardAbortMessage = "The operation was aborted."; -/** - * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. - * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. - * - * To learn more, please refer to - * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, - * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and - * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors - */ -class ThrottlingRetryPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, _handleResponse) { - super(nextPolicy, options); - this.numberOfRetries = 0; - this._handleResponse = _handleResponse || this._defaultResponseHandler; - } - async sendRequest(httpRequest) { - const response = await this._nextPolicy.sendRequest(httpRequest.clone()); - if (response.status !== StatusCodes.TooManyRequests && - response.status !== StatusCodes.ServiceUnavailable) { - return response; - } - else { - return this._handleResponse(httpRequest, response); - } - } - async _defaultResponseHandler(httpRequest, httpResponse) { - var _a; - const retryAfterHeader = httpResponse.headers.get(Constants.HeaderConstants.RETRY_AFTER); - if (retryAfterHeader) { - const delayInMs = ThrottlingRetryPolicy.parseRetryAfterHeader(retryAfterHeader); - if 
(delayInMs) { - this.numberOfRetries += 1; - await coreUtil.delay(delayInMs, { - abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage, - }); - if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage); - } - if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { - return this.sendRequest(httpRequest); - } - else { - return this._nextPolicy.sendRequest(httpRequest); - } - } - } - return httpResponse; - } - static parseRetryAfterHeader(headerValue) { - const retryAfterInSeconds = Number(headerValue); - if (Number.isNaN(retryAfterInSeconds)) { - return ThrottlingRetryPolicy.parseDateRetryAfterHeader(headerValue); - } - else { - return retryAfterInSeconds * 1000; - } - } - static parseDateRetryAfterHeader(headerValue) { - try { - const now = Date.now(); - const date = Date.parse(headerValue); - const diff = date - now; - return Number.isNaN(diff) ? undefined : diff; - } - catch (error) { - return undefined; - } - } -} - -// Copyright (c) Microsoft Corporation. -const createSpan = coreTracing.createSpanFunction({ - packagePrefix: "", - namespace: "", -}); -/** - * Creates a policy that wraps outgoing requests with a tracing span. - * @param tracingOptions - Tracing options. - * @returns An instance of the {@link TracingPolicy} class. - */ -function tracingPolicy(tracingOptions = {}) { - return { - create(nextPolicy, options) { - return new TracingPolicy(nextPolicy, options, tracingOptions); - }, - }; -} -/** - * A policy that wraps outgoing requests with a tracing span. 
- */ -class TracingPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, tracingOptions) { - super(nextPolicy, options); - this.userAgent = tracingOptions.userAgent; - } - async sendRequest(request) { - if (!request.tracingContext) { - return this._nextPolicy.sendRequest(request); - } - const span = this.tryCreateSpan(request); - if (!span) { - return this._nextPolicy.sendRequest(request); - } - try { - const response = await this._nextPolicy.sendRequest(request); - this.tryProcessResponse(span, response); - return response; - } - catch (err) { - this.tryProcessError(span, err); - throw err; - } - } - tryCreateSpan(request) { - var _a; - try { - // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. - // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(`HTTP ${request.method}`, { - tracingOptions: { - spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext, - }, - }); - // If the span is not recording, don't do any more work. - if (!span.isRecording()) { - span.end(); - return undefined; - } - const namespaceFromContext = (_a = request.tracingContext) === null || _a === void 0 ? 
void 0 : _a.getValue(Symbol.for("az.namespace")); - if (typeof namespaceFromContext === "string") { - span.setAttribute("az.namespace", namespaceFromContext); - } - span.setAttributes({ - "http.method": request.method, - "http.url": request.url, - requestId: request.requestId, - }); - if (this.userAgent) { - span.setAttribute("http.user_agent", this.userAgent); - } - // set headers - const spanContext = span.spanContext(); - const traceParentHeader = coreTracing.getTraceParentHeader(spanContext); - if (traceParentHeader && coreTracing.isSpanContextValid(spanContext)) { - request.headers.set("traceparent", traceParentHeader); - const traceState = spanContext.traceState && spanContext.traceState.serialize(); - // if tracestate is set, traceparent MUST be set, so only set tracestate after traceparent - if (traceState) { - request.headers.set("tracestate", traceState); - } - } - return span; - } - catch (error) { - logger.warning(`Skipping creating a tracing span due to an error: ${error.message}`); - return undefined; - } - } - tryProcessError(span, err) { - try { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: err.message, - }); - if (err.statusCode) { - span.setAttribute("http.status_code", err.statusCode); - } - span.end(); - } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); - } - } - tryProcessResponse(span, response) { - try { - span.setAttribute("http.status_code", response.status); - const serviceRequestId = response.headers.get("x-ms-request-id"); - if (serviceRequestId) { - span.setAttribute("serviceRequestId", serviceRequestId); - } - span.setStatus({ - code: coreTracing.SpanStatusCode.OK, - }); - span.end(); - } - catch (error) { - logger.warning(`Skipping tracing span processing due to an error: ${error.message}`); - } - } -} - -// Copyright (c) Microsoft Corporation. -/** - * ServiceClient sends service requests and receives responses. 
- */ -class ServiceClient { - /** - * The ServiceClient constructor - * @param credentials - The credentials used for authentication with the service. - * @param options - The service client options that govern the behavior of the client. - */ - constructor(credentials, - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options */ - options) { - if (!options) { - options = {}; - } - this._withCredentials = options.withCredentials || false; - this._httpClient = options.httpClient || getCachedDefaultHttpClient(); - this._requestPolicyOptions = new RequestPolicyOptions(options.httpPipelineLogger); - let requestPolicyFactories; - if (Array.isArray(options.requestPolicyFactories)) { - logger.info("ServiceClient: using custom request policies"); - requestPolicyFactories = options.requestPolicyFactories; - } - else { - let authPolicyFactory = undefined; - if (coreAuth.isTokenCredential(credentials)) { - logger.info("ServiceClient: creating bearer token authentication policy from provided credentials"); - // Create a wrapped RequestPolicyFactory here so that we can provide the - // correct scope to the BearerTokenAuthenticationPolicy at the first time - // one is requested. This is needed because generated ServiceClient - // implementations do not set baseUri until after ServiceClient's constructor - // is finished, leaving baseUri empty at the time when it is needed to - // build the correct scope name. - const wrappedPolicyFactory = () => { - let bearerTokenPolicyFactory = undefined; - // eslint-disable-next-line @typescript-eslint/no-this-alias - const serviceClient = this; - const serviceClientOptions = options; - return { - create(nextPolicy, createOptions) { - const credentialScopes = getCredentialScopes(serviceClientOptions, serviceClient.baseUri); - if (!credentialScopes) { - throw new Error(`When using credential, the ServiceClient must contain a baseUri or a credentialScopes in ServiceClientOptions. 
Unable to create a bearerTokenAuthenticationPolicy`); - } - if (bearerTokenPolicyFactory === undefined || bearerTokenPolicyFactory === null) { - bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); - } - return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - }, - }; - }; - authPolicyFactory = wrappedPolicyFactory(); - } - else if (credentials && typeof credentials.signRequest === "function") { - logger.info("ServiceClient: creating signing policy from provided credentials"); - authPolicyFactory = signingPolicy(credentials); - } - else if (credentials !== undefined && credentials !== null) { - throw new Error("The credentials argument must implement the TokenCredential interface"); - } - logger.info("ServiceClient: using default request policies"); - requestPolicyFactories = createDefaultRequestPolicyFactories(authPolicyFactory, options); - if (options.requestPolicyFactories) { - // options.requestPolicyFactories can also be a function that manipulates - // the default requestPolicyFactories array - const newRequestPolicyFactories = options.requestPolicyFactories(requestPolicyFactories); - if (newRequestPolicyFactories) { - requestPolicyFactories = newRequestPolicyFactories; - } - } - } - this._requestPolicyFactories = requestPolicyFactories; - } - /** - * Send the provided httpRequest. 
- */ - sendRequest(options) { - if (options === null || options === undefined || typeof options !== "object") { - throw new Error("options cannot be null or undefined and it must be of type object."); - } - let httpRequest; - try { - if (isWebResourceLike(options)) { - options.validateRequestProperties(); - httpRequest = options; - } - else { - httpRequest = new WebResource(); - httpRequest = httpRequest.prepare(options); - } - } - catch (error) { - return Promise.reject(error); - } - let httpPipeline = this._httpClient; - if (this._requestPolicyFactories && this._requestPolicyFactories.length > 0) { - for (let i = this._requestPolicyFactories.length - 1; i >= 0; --i) { - httpPipeline = this._requestPolicyFactories[i].create(httpPipeline, this._requestPolicyOptions); - } - } - return httpPipeline.sendRequest(httpRequest); - } - /** - * Send an HTTP request that is populated using the provided OperationSpec. - * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. - * @param operationSpec - The OperationSpec to use to populate the httpRequest. - * @param callback - The callback to call when the response is received. - */ - async sendOperationRequest(operationArguments, operationSpec, callback) { - var _a; - if (typeof operationArguments.options === "function") { - callback = operationArguments.options; - operationArguments.options = undefined; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? 
void 0 : _a.serializerOptions; - const httpRequest = new WebResource(); - let result; - try { - const baseUri = operationSpec.baseUrl || this.baseUri; - if (!baseUri) { - throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a baseUri string property that contains the base URL to use."); - } - httpRequest.method = operationSpec.httpMethod; - httpRequest.operationSpec = operationSpec; - const requestUrl = URLBuilder.parse(baseUri); - if (operationSpec.path) { - requestUrl.appendPath(operationSpec.path); - } - if (operationSpec.urlParameters && operationSpec.urlParameters.length > 0) { - for (const urlParameter of operationSpec.urlParameters) { - let urlParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, urlParameter, operationSpec.serializer); - urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, getPathStringFromParameter(urlParameter), serializerOptions); - if (!urlParameter.skipEncoding) { - urlParameterValue = encodeURIComponent(urlParameterValue); - } - requestUrl.replaceAll(`{${urlParameter.mapper.serializedName || getPathStringFromParameter(urlParameter)}}`, urlParameterValue); - } - } - if (operationSpec.queryParameters && operationSpec.queryParameters.length > 0) { - for (const queryParameter of operationSpec.queryParameters) { - let queryParameterValue = getOperationArgumentValueFromParameter(this, operationArguments, queryParameter, operationSpec.serializer); - if (queryParameterValue !== undefined && queryParameterValue !== null) { - queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, getPathStringFromParameter(queryParameter), serializerOptions); - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null) { - if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Multi) { - if (queryParameterValue.length === 0) { - // The collection is empty, 
no need to try serializing the current queryParam - continue; - } - else { - for (const index in queryParameterValue) { - const item = queryParameterValue[index]; - queryParameterValue[index] = - item === undefined || item === null ? "" : item.toString(); - } - } - } - else if (queryParameter.collectionFormat === exports.QueryCollectionFormat.Ssv || - queryParameter.collectionFormat === exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - } - if (!queryParameter.skipEncoding) { - if (Array.isArray(queryParameterValue)) { - for (const index in queryParameterValue) { - if (queryParameterValue[index] !== undefined && - queryParameterValue[index] !== null) { - queryParameterValue[index] = encodeURIComponent(queryParameterValue[index]); - } - } - } - else { - queryParameterValue = encodeURIComponent(queryParameterValue); - } - } - if (queryParameter.collectionFormat !== undefined && - queryParameter.collectionFormat !== null && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Multi && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Ssv && - queryParameter.collectionFormat !== exports.QueryCollectionFormat.Tsv) { - queryParameterValue = queryParameterValue.join(queryParameter.collectionFormat); - } - requestUrl.setQueryParameter(queryParameter.mapper.serializedName || getPathStringFromParameter(queryParameter), queryParameterValue); - } - } - } - httpRequest.url = requestUrl.toString(); - const contentType = operationSpec.contentType || this.requestContentType; - if (contentType && operationSpec.requestBody) { - httpRequest.headers.set("Content-Type", contentType); - } - if (operationSpec.headerParameters) { - for (const headerParameter of operationSpec.headerParameters) { - let headerValue = getOperationArgumentValueFromParameter(this, operationArguments, headerParameter, operationSpec.serializer); - if (headerValue !== undefined && headerValue !== null) { - 
headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, getPathStringFromParameter(headerParameter), serializerOptions); - const headerCollectionPrefix = headerParameter.mapper - .headerCollectionPrefix; - if (headerCollectionPrefix) { - for (const key of Object.keys(headerValue)) { - httpRequest.headers.set(headerCollectionPrefix + key, headerValue[key]); - } - } - else { - httpRequest.headers.set(headerParameter.mapper.serializedName || - getPathStringFromParameter(headerParameter), headerValue); - } - } - } - } - const options = operationArguments.options; - if (options) { - if (options.customHeaders) { - for (const customHeaderName in options.customHeaders) { - httpRequest.headers.set(customHeaderName, options.customHeaders[customHeaderName]); - } - } - if (options.abortSignal) { - httpRequest.abortSignal = options.abortSignal; - } - if (options.timeout) { - httpRequest.timeout = options.timeout; - } - if (options.onUploadProgress) { - httpRequest.onUploadProgress = options.onUploadProgress; - } - if (options.onDownloadProgress) { - httpRequest.onDownloadProgress = options.onDownloadProgress; - } - if (options.spanOptions) { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. 
- httpRequest.spanOptions = options.spanOptions; - } - if (options.tracingContext) { - httpRequest.tracingContext = options.tracingContext; - } - if (options.shouldDeserialize !== undefined && options.shouldDeserialize !== null) { - httpRequest.shouldDeserialize = options.shouldDeserialize; - } - } - httpRequest.withCredentials = this._withCredentials; - serializeRequestBody(this, httpRequest, operationArguments, operationSpec); - if (httpRequest.streamResponseStatusCodes === undefined) { - httpRequest.streamResponseStatusCodes = getStreamResponseStatusCodes(operationSpec); - } - let rawResponse; - let sendRequestError; - try { - rawResponse = await this.sendRequest(httpRequest); - } - catch (error) { - sendRequestError = error; - } - if (sendRequestError) { - if (sendRequestError.response) { - sendRequestError.details = flattenResponse(sendRequestError.response, operationSpec.responses[sendRequestError.statusCode] || - operationSpec.responses["default"]); - } - result = Promise.reject(sendRequestError); - } - else { - result = Promise.resolve(flattenResponse(rawResponse, operationSpec.responses[rawResponse.status])); - } - } - catch (error) { - result = Promise.reject(error); - } - const cb = callback; - if (cb) { - result - .then((res) => cb(null, res._response.parsedBody, res._response.request, res._response)) - .catch((err) => cb(err)); - } - return result; - } -} -function serializeRequestBody(serviceClient, httpRequest, operationArguments, operationSpec) { - var _a, _b, _c, _d, _e, _f; - const serializerOptions = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions) !== null && _b !== void 0 ? _b : {}; - const updatedOptions = { - rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", - includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? 
_e : XML_CHARKEY, - }; - const xmlCharKey = serializerOptions.xmlCharKey; - if (operationSpec.requestBody && operationSpec.requestBody.mapper) { - httpRequest.body = getOperationArgumentValueFromParameter(serviceClient, operationArguments, operationSpec.requestBody, operationSpec.serializer); - const bodyMapper = operationSpec.requestBody.mapper; - const { required, xmlName, xmlElementName, serializedName, xmlNamespace, xmlNamespacePrefix } = bodyMapper; - const typeName = bodyMapper.type.name; - try { - if ((httpRequest.body !== undefined && httpRequest.body !== null) || required) { - const requestBodyParameterPathString = getPathStringFromParameter(operationSpec.requestBody); - httpRequest.body = operationSpec.serializer.serialize(bodyMapper, httpRequest.body, requestBodyParameterPathString, updatedOptions); - const isStream = typeName === MapperType.Stream; - if (operationSpec.isXML) { - const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; - const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, httpRequest.body, updatedOptions); - if (typeName === MapperType.Sequence) { - httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - else if (!isStream) { - httpRequest.body = stringifyXML(value, { - rootName: xmlName || serializedName, - xmlCharKey, - }); - } - } - else if (typeName === MapperType.String && - (((_f = operationSpec.contentType) === null || _f === void 0 ? void 0 : _f.match("text/plain")) || operationSpec.mediaType === "text")) { - // the String serializer has validated that request body is a string - // so just send the string. 
- return; - } - else if (!isStream) { - httpRequest.body = JSON.stringify(httpRequest.body); - } - } - } - catch (error) { - throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); - } - } - else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { - httpRequest.formData = {}; - for (const formDataParameter of operationSpec.formDataParameters) { - const formDataParameterValue = getOperationArgumentValueFromParameter(serviceClient, operationArguments, formDataParameter, operationSpec.serializer); - if (formDataParameterValue !== undefined && formDataParameterValue !== null) { - const formDataParameterPropertyName = formDataParameter.mapper.serializedName || getPathStringFromParameter(formDataParameter); - httpRequest.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, getPathStringFromParameter(formDataParameter), updatedOptions); - } - } - } -} -/** - * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself - */ -function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { - // Composite and Sequence schemas already got their root namespace set during serialization - // We just need to add xmlns to the other schema types - if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { - const result = {}; - result[options.xmlCharKey] = serializedValue; - result[XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; - return result; - } - return serializedValue; -} -function getValueOrFunctionResult(value, defaultValueCreator) { - let result; - if (typeof value === "string") { - result = value; - } - else { - result = defaultValueCreator(); - if (typeof value === "function") { - result = value(result); - } - } - return result; -} -function 
createDefaultRequestPolicyFactories(authPolicyFactory, options) { - const factories = []; - if (options.generateClientRequestIdHeader) { - factories.push(generateClientRequestIdPolicy(options.clientRequestIdHeaderName)); - } - if (authPolicyFactory) { - factories.push(authPolicyFactory); - } - const userAgentHeaderName = getValueOrFunctionResult(options.userAgentHeaderName, getDefaultUserAgentHeaderName); - const userAgentHeaderValue = getValueOrFunctionResult(options.userAgent, getDefaultUserAgentValue); - if (userAgentHeaderName && userAgentHeaderValue) { - factories.push(userAgentPolicy({ key: userAgentHeaderName, value: userAgentHeaderValue })); - } - factories.push(redirectPolicy()); - factories.push(rpRegistrationPolicy(options.rpRegistrationRetryTimeout)); - if (!options.noRetryPolicy) { - factories.push(exponentialRetryPolicy()); - factories.push(systemErrorRetryPolicy()); - factories.push(throttlingRetryPolicy()); - } - factories.push(deserializationPolicy(options.deserializationContentTypes)); - if (coreUtil.isNode) { - factories.push(proxyPolicy(options.proxySettings)); - } - factories.push(logPolicy({ logger: logger.info })); - return factories; -} -/** - * Creates an HTTP pipeline based on the given options. - * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. - * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. - * @returns A set of options that can be passed to create a new {@link ServiceClient}. 
- */ -function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { - const requestPolicyFactories = []; - if (pipelineOptions.sendStreamingJson) { - requestPolicyFactories.push(ndJsonPolicy()); - } - let userAgentValue = undefined; - if (pipelineOptions.userAgentOptions && pipelineOptions.userAgentOptions.userAgentPrefix) { - const userAgentInfo = []; - userAgentInfo.push(pipelineOptions.userAgentOptions.userAgentPrefix); - // Add the default user agent value if it isn't already specified - // by the userAgentPrefix option. - const defaultUserAgentInfo = getDefaultUserAgentValue(); - if (userAgentInfo.indexOf(defaultUserAgentInfo) === -1) { - userAgentInfo.push(defaultUserAgentInfo); - } - userAgentValue = userAgentInfo.join(" "); - } - const keepAliveOptions = Object.assign(Object.assign({}, DefaultKeepAliveOptions), pipelineOptions.keepAliveOptions); - const retryOptions = Object.assign(Object.assign({}, DefaultRetryOptions), pipelineOptions.retryOptions); - const redirectOptions = Object.assign(Object.assign({}, DefaultRedirectOptions), pipelineOptions.redirectOptions); - if (coreUtil.isNode) { - requestPolicyFactories.push(proxyPolicy(pipelineOptions.proxyOptions)); - } - const deserializationOptions = Object.assign(Object.assign({}, DefaultDeserializationOptions), pipelineOptions.deserializationOptions); - const loggingOptions = Object.assign({}, pipelineOptions.loggingOptions); - requestPolicyFactories.push(tracingPolicy({ userAgent: userAgentValue }), keepAlivePolicy(keepAliveOptions), userAgentPolicy({ value: userAgentValue }), generateClientRequestIdPolicy(), deserializationPolicy(deserializationOptions.expectedContentTypes), throttlingRetryPolicy(), systemErrorRetryPolicy(), exponentialRetryPolicy(retryOptions.maxRetries, retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs)); - if (redirectOptions.handleRedirects) { - requestPolicyFactories.push(redirectPolicy(redirectOptions.maxRetries)); - } - if (authPolicyFactory) { - 
requestPolicyFactories.push(authPolicyFactory); - } - requestPolicyFactories.push(logPolicy(loggingOptions)); - if (coreUtil.isNode && pipelineOptions.decompressResponse === false) { - requestPolicyFactories.push(disableResponseDecompressionPolicy()); - } - return { - httpClient: pipelineOptions.httpClient, - requestPolicyFactories, - }; -} -function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { - return getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameter.parameterPath, parameter.mapper, serializer); -} -function getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, parameterPath, parameterMapper, serializer) { - var _a; - let value; - if (typeof parameterPath === "string") { - parameterPath = [parameterPath]; - } - const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; - if (Array.isArray(parameterPath)) { - if (parameterPath.length > 0) { - if (parameterMapper.isConstant) { - value = parameterMapper.defaultValue; - } - else { - let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); - if (!propertySearchResult.propertyFound) { - propertySearchResult = getPropertyFromParameterPath(serviceClient, parameterPath); - } - let useDefaultValue = false; - if (!propertySearchResult.propertyFound) { - useDefaultValue = - parameterMapper.required || - (parameterPath[0] === "options" && parameterPath.length === 2); - } - value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; - } - // Serialize just for validation purposes. 
- const parameterPathString = getPathStringFromParameterPath(parameterPath, parameterMapper); - serializer.serialize(parameterMapper, value, parameterPathString, serializerOptions); - } - } - else { - if (parameterMapper.required) { - value = {}; - } - for (const propertyName in parameterPath) { - const propertyMapper = parameterMapper.type.modelProperties[propertyName]; - const propertyPath = parameterPath[propertyName]; - const propertyValue = getOperationArgumentValueFromParameterPath(serviceClient, operationArguments, propertyPath, propertyMapper, serializer); - // Serialize just for validation purposes. - const propertyPathString = getPathStringFromParameterPath(propertyPath, propertyMapper); - serializer.serialize(propertyMapper, propertyValue, propertyPathString, serializerOptions); - if (propertyValue !== undefined && propertyValue !== null) { - if (!value) { - value = {}; - } - value[propertyName] = propertyValue; - } - } - } - return value; -} -function getPropertyFromParameterPath(parent, parameterPath) { - const result = { propertyFound: false }; - let i = 0; - for (; i < parameterPath.length; ++i) { - const parameterPathPart = parameterPath[i]; - // Make sure to check inherited properties too, so don't use hasOwnProperty(). - if (parent !== undefined && parent !== null && parameterPathPart in parent) { - parent = parent[parameterPathPart]; - } - else { - break; - } - } - if (i === parameterPath.length) { - result.propertyValue = parent; - result.propertyFound = true; - } - return result; -} -/** - * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). - * @param _response - Wrapper object for http response. - * @param responseSpec - Mappers for how to parse the response properties. - * @returns - A normalized response object. 
- */ -function flattenResponse(_response, responseSpec) { - const parsedHeaders = _response.parsedHeaders; - const bodyMapper = responseSpec && responseSpec.bodyMapper; - const addOperationResponse = (obj) => { - return Object.defineProperty(obj, "_response", { - value: _response, - }); - }; - if (bodyMapper) { - const typeName = bodyMapper.type.name; - if (typeName === "Stream") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { blobBody: _response.blobBody, readableStreamBody: _response.readableStreamBody })); - } - const modelProperties = (typeName === "Composite" && bodyMapper.type.modelProperties) || {}; - const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); - if (typeName === "Sequence" || isPageableResponse) { - const arrayResponse = [...(_response.parsedBody || [])]; - for (const key of Object.keys(modelProperties)) { - if (modelProperties[key].serializedName) { - arrayResponse[key] = _response.parsedBody[key]; - } - } - if (parsedHeaders) { - for (const key of Object.keys(parsedHeaders)) { - arrayResponse[key] = parsedHeaders[key]; - } - } - addOperationResponse(arrayResponse); - return arrayResponse; - } - if (typeName === "Composite" || typeName === "Dictionary") { - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); - } - } - if (bodyMapper || - _response.request.method === "HEAD" || - isPrimitiveType(_response.parsedBody)) { - // primitive body types and HEAD booleans - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), { body: _response.parsedBody })); - } - return addOperationResponse(Object.assign(Object.assign({}, parsedHeaders), _response.parsedBody)); -} -function getCredentialScopes(options, baseUri) { - if (options === null || options === void 0 ? 
void 0 : options.credentialScopes) { - return options.credentialScopes; - } - if (baseUri) { - return `${baseUri}/.default`; - } - return undefined; -} - -// Copyright (c) Microsoft Corporation. -/** - * This function is only here for compatibility. Use createSpanFunction in core-tracing. - * - * @deprecated This function is only here for compatibility. Use createSpanFunction in core-tracing. - * @hidden - - * @param spanConfig - The name of the operation being performed. - * @param tracingOptions - The options for the underlying http request. - */ -function createSpanFunction(args) { - return coreTracing.createSpanFunction(args); -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Defines the default token refresh buffer duration. - */ -const TokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes -/** - * Provides an {@link AccessTokenCache} implementation which clears - * the cached {@link AccessToken}'s after the expiresOnTimestamp has - * passed. - * - * @deprecated No longer used in the bearer authorization policy. - */ -class ExpiringAccessTokenCache { - /** - * Constructs an instance of {@link ExpiringAccessTokenCache} with - * an optional expiration buffer time. - */ - constructor(tokenRefreshBufferMs = TokenRefreshBufferMs) { - this.cachedToken = undefined; - this.tokenRefreshBufferMs = tokenRefreshBufferMs; - } - /** - * Saves an access token into the internal in-memory cache. - * @param accessToken - Access token or undefined to clear the cache. - */ - setCachedToken(accessToken) { - this.cachedToken = accessToken; - } - /** - * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. - */ - getCachedToken() { - if (this.cachedToken && - Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { - this.cachedToken = undefined; - } - return this.cachedToken; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-/** - * Helps the core-http token authentication policies with requesting a new token if we're not currently waiting for a new token. - * - * @deprecated No longer used in the bearer authorization policy. - */ -class AccessTokenRefresher { - constructor(credential, scopes, requiredMillisecondsBeforeNewRefresh = 30000) { - this.credential = credential; - this.scopes = scopes; - this.requiredMillisecondsBeforeNewRefresh = requiredMillisecondsBeforeNewRefresh; - this.lastCalled = 0; - } - /** - * Returns true if the required milliseconds(defaulted to 30000) have been passed signifying - * that we are ready for a new refresh. - */ - isReady() { - // We're only ready for a new refresh if the required milliseconds have passed. - return (!this.lastCalled || Date.now() - this.lastCalled > this.requiredMillisecondsBeforeNewRefresh); - } - /** - * Stores the time in which it is called, - * then requests a new token, - * then sets this.promise to undefined, - * then returns the token. - */ - async getToken(options) { - this.lastCalled = Date.now(); - const token = await this.credential.getToken(this.scopes, options); - this.promise = undefined; - return token || undefined; - } - /** - * Requests a new token if we're not currently waiting for a new token. - * Returns null if the required time between each call hasn't been reached. - */ - refresh(options) { - if (!this.promise) { - this.promise = this.getToken(options); - } - return this.promise; - } -} - -// Copyright (c) Microsoft Corporation. -const HeaderConstants = Constants.HeaderConstants; -const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; -/** - * A simple {@link ServiceClientCredential} that authenticates with a username and a password. - */ -class BasicAuthenticationCredentials { - /** - * Creates a new BasicAuthenticationCredentials object. - * - * @param userName - User name. - * @param password - Password. - * @param authorizationScheme - The authorization scheme. 
- */ - constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { - /** - * Authorization scheme. Defaults to "Basic". - * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes - */ - this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; - if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { - throw new Error("userName cannot be null or undefined and must be of type string."); - } - if (password === null || password === undefined || typeof password.valueOf() !== "string") { - throw new Error("password cannot be null or undefined and must be of type string."); - } - this.userName = userName; - this.password = password; - this.authorizationScheme = authorizationScheme; - } - /** - * Signs a request with the Authentication header. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. - */ - signRequest(webResource) { - const credentials = `${this.userName}:${this.password}`; - const encodedCredentials = `${this.authorizationScheme} ${encodeString(credentials)}`; - if (!webResource.headers) - webResource.headers = new HttpHeaders(); - webResource.headers.set(HeaderConstants.AUTHORIZATION, encodedCredentials); - return Promise.resolve(webResource); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * Authenticates to a service using an API key. - */ -class ApiKeyCredentials { - /** - * @param options - Specifies the options to be provided for auth. Either header or query needs to be provided. - */ - constructor(options) { - if (!options || (options && !options.inHeader && !options.inQuery)) { - throw new Error(`options cannot be null or undefined. 
Either "inHeader" or "inQuery" property of the options object needs to be provided.`); - } - this.inHeader = options.inHeader; - this.inQuery = options.inQuery; - } - /** - * Signs a request with the values provided in the inHeader and inQuery parameter. - * - * @param webResource - The WebResourceLike to be signed. - * @returns The signed request object. - */ - signRequest(webResource) { - if (!webResource) { - return Promise.reject(new Error(`webResource cannot be null or undefined and must be of type "object".`)); - } - if (this.inHeader) { - if (!webResource.headers) { - webResource.headers = new HttpHeaders(); - } - for (const headerName in this.inHeader) { - webResource.headers.set(headerName, this.inHeader[headerName]); - } - } - if (this.inQuery) { - if (!webResource.url) { - return Promise.reject(new Error(`url cannot be null in the request object.`)); - } - if (webResource.url.indexOf("?") < 0) { - webResource.url += "?"; - } - for (const key in this.inQuery) { - if (!webResource.url.endsWith("?")) { - webResource.url += "&"; - } - webResource.url += `${key}=${this.inQuery[key]}`; - } - } - return Promise.resolve(webResource); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * A {@link TopicCredentials} object used for Azure Event Grid. - */ -class TopicCredentials extends ApiKeyCredentials { - /** - * Creates a new EventGrid TopicCredentials object. 
- * - * @param topicKey - The EventGrid topic key - */ - constructor(topicKey) { - if (!topicKey || (topicKey && typeof topicKey !== "string")) { - throw new Error("topicKey cannot be null or undefined and must be of type string."); - } - const options = { - inHeader: { - "aeg-sas-key": topicKey, - }, - }; - super(options); - } -} - -Object.defineProperty(exports, 'delay', { - enumerable: true, - get: function () { return coreUtil.delay; } -}); -Object.defineProperty(exports, 'isNode', { - enumerable: true, - get: function () { return coreUtil.isNode; } -}); -Object.defineProperty(exports, 'isTokenCredential', { - enumerable: true, - get: function () { return coreAuth.isTokenCredential; } -}); -exports.AccessTokenRefresher = AccessTokenRefresher; -exports.ApiKeyCredentials = ApiKeyCredentials; -exports.BaseRequestPolicy = BaseRequestPolicy; -exports.BasicAuthenticationCredentials = BasicAuthenticationCredentials; -exports.Constants = Constants; -exports.DefaultHttpClient = NodeFetchHttpClient; -exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; -exports.HttpHeaders = HttpHeaders; -exports.MapperType = MapperType; -exports.RequestPolicyOptions = RequestPolicyOptions; -exports.RestError = RestError; -exports.Serializer = Serializer; -exports.ServiceClient = ServiceClient; -exports.TopicCredentials = TopicCredentials; -exports.URLBuilder = URLBuilder; -exports.URLQuery = URLQuery; -exports.WebResource = WebResource; -exports.XML_ATTRKEY = XML_ATTRKEY; -exports.XML_CHARKEY = XML_CHARKEY; -exports.applyMixins = applyMixins; -exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; -exports.createPipelineFromOptions = createPipelineFromOptions; -exports.createSpanFunction = createSpanFunction; -exports.deserializationPolicy = deserializationPolicy; -exports.deserializeResponseBody = deserializeResponseBody; -exports.disableResponseDecompressionPolicy = disableResponseDecompressionPolicy; -exports.encodeUri = encodeUri; 
-exports.executePromisesSequentially = executePromisesSequentially; -exports.exponentialRetryPolicy = exponentialRetryPolicy; -exports.flattenResponse = flattenResponse; -exports.generateClientRequestIdPolicy = generateClientRequestIdPolicy; -exports.generateUuid = generateUuid; -exports.getDefaultProxySettings = getDefaultProxySettings; -exports.getDefaultUserAgentValue = getDefaultUserAgentValue; -exports.isDuration = isDuration; -exports.isValidUuid = isValidUuid; -exports.keepAlivePolicy = keepAlivePolicy; -exports.logPolicy = logPolicy; -exports.operationOptionsToRequestOptionsBase = operationOptionsToRequestOptionsBase; -exports.parseXML = parseXML; -exports.promiseToCallback = promiseToCallback; -exports.promiseToServiceCallback = promiseToServiceCallback; -exports.proxyPolicy = proxyPolicy; -exports.redirectPolicy = redirectPolicy; -exports.serializeObject = serializeObject; -exports.signingPolicy = signingPolicy; -exports.stringifyXML = stringifyXML; -exports.stripRequest = stripRequest; -exports.stripResponse = stripResponse; -exports.systemErrorRetryPolicy = systemErrorRetryPolicy; -exports.throttlingRetryPolicy = throttlingRetryPolicy; -exports.tracingPolicy = tracingPolicy; -exports.userAgentPolicy = userAgentPolicy; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js b/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js deleted file mode 100644 index 0d260f30d..000000000 --- a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/AbortController.js +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { AbortSignal, abortSignal } from "./AbortSignal"; -/** - * This error is thrown when an asynchronous operation has been aborted. 
- * Check for this error by testing the `name` that the name property of the - * error matches `"AbortError"`. - * - * @example - * ```ts - * const controller = new AbortController(); - * controller.abort(); - * try { - * doAsyncWork(controller.signal) - * } catch (e) { - * if (e.name === 'AbortError') { - * // handle abort error here. - * } - * } - * ``` - */ -export class AbortError extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } -} -/** - * An AbortController provides an AbortSignal and the associated controls to signal - * that an asynchronous operation should be aborted. - * - * @example - * Abort an operation when another event fires - * ```ts - * const controller = new AbortController(); - * const signal = controller.signal; - * doAsyncWork(signal); - * button.addEventListener('click', () => controller.abort()); - * ``` - * - * @example - * Share aborter cross multiple operations in 30s - * ```ts - * // Upload the same data to 2 different data centers at the same time, - * // abort another when any of them is finished - * const controller = AbortController.withTimeout(30 * 1000); - * doAsyncWork(controller.signal).then(controller.abort); - * doAsyncWork(controller.signal).then(controller.abort); - *``` - * - * @example - * Cascaded aborting - * ```ts - * // All operations can't take more than 30 seconds - * const aborter = Aborter.timeout(30 * 1000); - * - * // Following 2 operations can't take more than 25 seconds - * await doAsyncWork(aborter.withTimeout(25 * 1000)); - * await doAsyncWork(aborter.withTimeout(25 * 1000)); - * ``` - */ -export class AbortController { - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - constructor(parentSignals) { - this._signal = new AbortSignal(); - if (!parentSignals) { - return; - } - // coerce parentSignals into an array - if (!Array.isArray(parentSignals)) { - // eslint-disable-next-line prefer-rest-params - parentSignals = arguments; - } - for 
(const parentSignal of parentSignals) { - // if the parent signal has already had abort() called, - // then call abort on this signal as well. - if (parentSignal.aborted) { - this.abort(); - } - else { - // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", () => { - this.abort(); - }); - } - } - } - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get signal() { - return this._signal; - } - /** - * Signal that any operations passed this controller's associated abort signal - * to cancel any remaining work and throw an `AbortError`. - */ - abort() { - abortSignal(this._signal); - } - /** - * Creates a new AbortSignal instance that will abort after the provided ms. - * @param ms - Elapsed time in milliseconds to trigger an abort. - */ - static timeout(ms) { - const signal = new AbortSignal(); - const timer = setTimeout(abortSignal, ms, signal); - // Prevent the active Timer from keeping the Node.js event loop active. - if (typeof timer.unref === "function") { - timer.unref(); - } - return signal; - } -} -//# sourceMappingURL=AbortController.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js b/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js deleted file mode 100644 index e97336ad1..000000000 --- a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/AbortSignal.js +++ /dev/null @@ -1,115 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/// -const listenersMap = new WeakMap(); -const abortedMap = new WeakMap(); -/** - * An aborter instance implements AbortSignal interface, can abort HTTP requests. - * - * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. 
- * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation - * cannot or will not ever be cancelled. - * - * @example - * Abort without timeout - * ```ts - * await doAsyncWork(AbortSignal.none); - * ``` - */ -export class AbortSignal { - constructor() { - /** - * onabort event listener. - */ - this.onabort = null; - listenersMap.set(this, []); - abortedMap.set(this, false); - } - /** - * Status of whether aborted or not. - * - * @readonly - */ - get aborted() { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); - } - /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly - */ - static get none() { - return new AbortSignal(); - } - /** - * Added new "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be added - */ - addEventListener( - // tslint:disable-next-line:variable-name - _type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - const listeners = listenersMap.get(this); - listeners.push(listener); - } - /** - * Remove "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be removed - */ - removeEventListener( - // tslint:disable-next-line:variable-name - _type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - const listeners = listenersMap.get(this); - const index = listeners.indexOf(listener); - if (index > -1) { - listeners.splice(index, 1); - } - } - /** - * Dispatches a synthetic event to the AbortSignal. - */ - dispatchEvent(_event) { - throw new Error("This is a stub dispatchEvent implementation that should not be used. 
It only exists for type-checking purposes."); - } -} -/** - * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. - * Will try to trigger abort event for all linked AbortSignal nodes. - * - * - If there is a timeout, the timer will be cancelled. - * - If aborted is true, nothing will happen. - * - * @internal - */ -// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters -export function abortSignal(signal) { - if (signal.aborted) { - return; - } - if (signal.onabort) { - signal.onabort.call(signal); - } - const listeners = listenersMap.get(signal); - if (listeners) { - // Create a copy of listeners so mutations to the array - // (e.g. via removeListener calls) don't affect the listeners - // we invoke. - listeners.slice().forEach((listener) => { - listener.call(signal, { type: "abort" }); - }); - } - abortedMap.set(signal, true); -} -//# sourceMappingURL=AbortSignal.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/index.js b/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/index.js deleted file mode 100644 index ddbf505bd..000000000 --- a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist-esm/src/index.js +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// Changes to Aborter -// * Rename Aborter to AbortSignal -// * Remove withValue and getValue - async context should be solved differently/wholistically, not tied to cancellation -// * Remove withTimeout, it's moved to the controller -// * AbortSignal constructor no longer takes a parent. Cancellation graphs are created from the controller. 
-// Potential changes to align with DOM Spec -// * dispatchEvent on Signal -export { AbortController, AbortError } from "./AbortController"; -export { AbortSignal } from "./AbortSignal"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist/index.js b/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist/index.js deleted file mode 100644 index 650dd5f3e..000000000 --- a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/dist/index.js +++ /dev/null @@ -1,239 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/// -const listenersMap = new WeakMap(); -const abortedMap = new WeakMap(); -/** - * An aborter instance implements AbortSignal interface, can abort HTTP requests. - * - * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. - * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation - * cannot or will not ever be cancelled. - * - * @example - * Abort without timeout - * ```ts - * await doAsyncWork(AbortSignal.none); - * ``` - */ -class AbortSignal { - constructor() { - /** - * onabort event listener. - */ - this.onabort = null; - listenersMap.set(this, []); - abortedMap.set(this, false); - } - /** - * Status of whether aborted or not. - * - * @readonly - */ - get aborted() { - if (!abortedMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - return abortedMap.get(this); - } - /** - * Creates a new AbortSignal instance that will never be aborted. - * - * @readonly - */ - static get none() { - return new AbortSignal(); - } - /** - * Added new "abort" event listener, only support "abort" event. 
- * - * @param _type - Only support "abort" event - * @param listener - The listener to be added - */ - addEventListener( - // tslint:disable-next-line:variable-name - _type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - const listeners = listenersMap.get(this); - listeners.push(listener); - } - /** - * Remove "abort" event listener, only support "abort" event. - * - * @param _type - Only support "abort" event - * @param listener - The listener to be removed - */ - removeEventListener( - // tslint:disable-next-line:variable-name - _type, listener) { - if (!listenersMap.has(this)) { - throw new TypeError("Expected `this` to be an instance of AbortSignal."); - } - const listeners = listenersMap.get(this); - const index = listeners.indexOf(listener); - if (index > -1) { - listeners.splice(index, 1); - } - } - /** - * Dispatches a synthetic event to the AbortSignal. - */ - dispatchEvent(_event) { - throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); - } -} -/** - * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. - * Will try to trigger abort event for all linked AbortSignal nodes. - * - * - If there is a timeout, the timer will be cancelled. - * - If aborted is true, nothing will happen. - * - * @internal - */ -// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters -function abortSignal(signal) { - if (signal.aborted) { - return; - } - if (signal.onabort) { - signal.onabort.call(signal); - } - const listeners = listenersMap.get(signal); - if (listeners) { - // Create a copy of listeners so mutations to the array - // (e.g. via removeListener calls) don't affect the listeners - // we invoke. 
- listeners.slice().forEach((listener) => { - listener.call(signal, { type: "abort" }); - }); - } - abortedMap.set(signal, true); -} - -// Copyright (c) Microsoft Corporation. -/** - * This error is thrown when an asynchronous operation has been aborted. - * Check for this error by testing the `name` that the name property of the - * error matches `"AbortError"`. - * - * @example - * ```ts - * const controller = new AbortController(); - * controller.abort(); - * try { - * doAsyncWork(controller.signal) - * } catch (e) { - * if (e.name === 'AbortError') { - * // handle abort error here. - * } - * } - * ``` - */ -class AbortError extends Error { - constructor(message) { - super(message); - this.name = "AbortError"; - } -} -/** - * An AbortController provides an AbortSignal and the associated controls to signal - * that an asynchronous operation should be aborted. - * - * @example - * Abort an operation when another event fires - * ```ts - * const controller = new AbortController(); - * const signal = controller.signal; - * doAsyncWork(signal); - * button.addEventListener('click', () => controller.abort()); - * ``` - * - * @example - * Share aborter cross multiple operations in 30s - * ```ts - * // Upload the same data to 2 different data centers at the same time, - * // abort another when any of them is finished - * const controller = AbortController.withTimeout(30 * 1000); - * doAsyncWork(controller.signal).then(controller.abort); - * doAsyncWork(controller.signal).then(controller.abort); - *``` - * - * @example - * Cascaded aborting - * ```ts - * // All operations can't take more than 30 seconds - * const aborter = Aborter.timeout(30 * 1000); - * - * // Following 2 operations can't take more than 25 seconds - * await doAsyncWork(aborter.withTimeout(25 * 1000)); - * await doAsyncWork(aborter.withTimeout(25 * 1000)); - * ``` - */ -class AbortController { - // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types - constructor(parentSignals) { - 
this._signal = new AbortSignal(); - if (!parentSignals) { - return; - } - // coerce parentSignals into an array - if (!Array.isArray(parentSignals)) { - // eslint-disable-next-line prefer-rest-params - parentSignals = arguments; - } - for (const parentSignal of parentSignals) { - // if the parent signal has already had abort() called, - // then call abort on this signal as well. - if (parentSignal.aborted) { - this.abort(); - } - else { - // when the parent signal aborts, this signal should as well. - parentSignal.addEventListener("abort", () => { - this.abort(); - }); - } - } - } - /** - * The AbortSignal associated with this controller that will signal aborted - * when the abort method is called on this controller. - * - * @readonly - */ - get signal() { - return this._signal; - } - /** - * Signal that any operations passed this controller's associated abort signal - * to cancel any remaining work and throw an `AbortError`. - */ - abort() { - abortSignal(this._signal); - } - /** - * Creates a new AbortSignal instance that will abort after the provided ms. - * @param ms - Elapsed time in milliseconds to trigger an abort. - */ - static timeout(ms) { - const signal = new AbortSignal(); - const timer = setTimeout(abortSignal, ms, signal); - // Prevent the active Timer from keeping the Node.js event loop active. 
- if (typeof timer.unref === "function") { - timer.unref(); - } - return signal; - } -} - -exports.AbortController = AbortController; -exports.AbortError = AbortError; -exports.AbortSignal = AbortSignal; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/package.json b/node_modules/@azure/core-http/node_modules/@azure/abort-controller/package.json deleted file mode 100644 index 6126bc278..000000000 --- a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/package.json +++ /dev/null @@ -1,104 +0,0 @@ -{ - "name": "@azure/abort-controller", - "sdk-type": "client", - "version": "1.1.0", - "description": "Microsoft Azure SDK for JavaScript - Aborter", - "main": "./dist/index.js", - "module": "dist-esm/src/index.js", - "scripts": { - "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", - "build:samples": "echo Obsolete", - "build:test": "tsc -p . && dev-tool run bundle", - "build:types": "downlevel-dts types/src types/3.1", - "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", - "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", - "clean": "rimraf dist dist-* temp types *.tgz *.log", - "execute:samples": "echo skipped", - "extract-api": "tsc -p . 
&& api-extractor run --local", - "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", - "integration-test:browser": "echo skipped", - "integration-test:node": "echo skipped", - "integration-test": "npm run integration-test:node && npm run integration-test:browser", - "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", - "lint": "eslint package.json api-extractor.json src test --ext .ts", - "pack": "npm pack 2>&1", - "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", - "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", - "test": "npm run clean && tsc -p . && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", - "unit-test:browser": "karma start --single-run", - "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", - "unit-test": "npm run unit-test:node && npm run unit-test:browser" - }, - "types": "./types/src/index.d.ts", - "typesVersions": { - "<3.6": { - "types/src/*": [ - "types/3.1/*" - ] - } - }, - "files": [ - "dist/", - "dist-esm/src/", - "shims-public.d.ts", - "types/src", - "types/3.1", - "README.md", - "LICENSE" - ], - "engines": { - "node": ">=12.0.0" - }, - "repository": "github:Azure/azure-sdk-for-js", - "keywords": [ - "azure", - "aborter", - "abortsignal", - "cancellation", - "node.js", - "typescript", - "javascript", - "browser", - "cloud" - ], - "author": "Microsoft Corporation", - "license": "MIT", - "bugs": { - "url": "https://github.com/Azure/azure-sdk-for-js/issues" - }, - "homepage": 
"https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/core/abort-controller/README.md", - "sideEffects": false, - "dependencies": { - "tslib": "^2.2.0" - }, - "devDependencies": { - "@azure/dev-tool": "^1.0.0", - "@azure/eslint-plugin-azure-sdk": "^3.0.0", - "@microsoft/api-extractor": "7.18.11", - "@types/chai": "^4.1.6", - "@types/mocha": "^7.0.2", - "@types/node": "^12.0.0", - "chai": "^4.2.0", - "cross-env": "^7.0.2", - "downlevel-dts": "^0.8.0", - "eslint": "^7.15.0", - "karma": "^6.2.0", - "karma-chrome-launcher": "^3.0.0", - "karma-coverage": "^2.0.0", - "karma-edge-launcher": "^0.4.2", - "karma-env-preprocessor": "^0.1.1", - "karma-firefox-launcher": "^1.1.0", - "karma-ie-launcher": "^1.0.0", - "karma-junit-reporter": "^2.0.1", - "karma-mocha": "^2.0.1", - "karma-mocha-reporter": "^2.2.5", - "karma-sourcemap-loader": "^0.3.8", - "mocha": "^7.1.1", - "mocha-junit-reporter": "^2.0.0", - "nyc": "^15.0.0", - "prettier": "^2.5.1", - "rimraf": "^3.0.0", - "ts-node": "^10.0.0", - "typescript": "~4.6.0" - } -} diff --git a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json b/node_modules/@azure/core-http/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json deleted file mode 100644 index 7b5aee324..000000000 --- a/node_modules/@azure/core-http/node_modules/@azure/abort-controller/types/src/tsdoc-metadata.json +++ /dev/null @@ -1,11 +0,0 @@ -// This file is read by tools that parse documentation comments conforming to the TSDoc standard. -// It should be published with your NPM package. It should not be tracked by Git. 
-{ - "tsdocVersion": "0.12", - "toolPackages": [ - { - "packageName": "@microsoft/api-extractor", - "packageVersion": "7.18.11" - } - ] -} diff --git a/node_modules/@azure/core-http/package.json b/node_modules/@azure/core-http/package.json deleted file mode 100644 index b1ae84d8b..000000000 --- a/node_modules/@azure/core-http/package.json +++ /dev/null @@ -1,177 +0,0 @@ -{ - "name": "@azure/core-http", - "sdk-type": "client", - "author": "Microsoft Corporation", - "version": "3.0.4", - "description": "Isomorphic client Runtime for Typescript/node.js/browser javascript client libraries generated using AutoRest", - "tags": [ - "isomorphic", - "browser", - "javascript", - "node", - "microsoft", - "autorest", - "clientruntime" - ], - "engines": { - "node": ">=14.0.0" - }, - "keywords": [ - "isomorphic", - "browser", - "javascript", - "node", - "microsoft", - "autorest", - "clientruntime", - "azure", - "cloud" - ], - "main": "dist/index.js", - "module": "./dist-esm/src/index.js", - "types": "./types/latest/src/index.d.ts", - "typesVersions": { - "<3.6": { - "types/latest/src/*": [ - "types/3.1/src/*" - ] - } - }, - "files": [ - "dist/", - "dist-esm/src/", - "dom-shim.d.ts", - "types/*/src/**/*.d.ts", - "types/*/src/**/*.d.ts.map", - "README.md", - "LICENSE" - ], - "browser": { - "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.browser.js", - "./dist-esm/src/policies/disableResponseDecompressionPolicy.js": "./dist-esm/src/policies/disableResponseDecompressionPolicy.browser.js", - "./dist-esm/src/policies/proxyPolicy.js": "./dist-esm/src/policies/proxyPolicy.browser.js", - "./dist-esm/src/util/base64.js": "./dist-esm/src/util/base64.browser.js", - "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.browser.js", - "./dist-esm/src/defaultHttpClient.js": "./dist-esm/src/defaultHttpClient.browser.js", - "./dist-esm/src/util/inspect.js": "./dist-esm/src/util/inspect.browser.js" - }, - "react-native": { - "./dist/index.js": 
"./dist-esm/src/index.js", - "./dist-esm/src/util/xml.js": "./dist-esm/src/util/xml.js", - "./dist-esm/src/policies/msRestUserAgentPolicy.js": "./dist-esm/src/policies/msRestUserAgentPolicy.native.js" - }, - "license": "MIT", - "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-http/README.md", - "repository": "github:Azure/azure-sdk-for-js", - "bugs": { - "url": "https://github.com/Azure/azure-sdk-for-js/issues" - }, - "scripts": { - "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", - "build:samples": "echo Obsolete", - "build:test": "tsc -p tsconfig.es.json && dev-tool run bundle", - "build:types": "downlevel-dts types/latest/ types/3.1/", - "build": "npm run clean && tsc -p tsconfig.es.json && dev-tool run bundle && api-extractor run --local && npm run build:types", - "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", - "clean": "rimraf dist dist-* temp types *.tgz *.log", - "execute:samples": "echo skipped", - "extract-api": "tsc -p tsconfig.es.json && api-extractor run --local", - "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", - "integration-test:browser": "echo skipped", - "integration-test:node": "echo skipped", - "integration-test": "npm run integration-test:node && npm run integration-test:browser", - "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", - "lint": "eslint package.json api-extractor.json src test --ext .ts", - "pack": "npm pack 2>&1", - "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", - "test:node": "npm run clean && tsc -p tsconfig.es.json && npm run unit-test:node && npm run 
integration-test:node", - "test": "npm run clean && tsc -p tsconfig.es.json && npm run unit-test:node && dev-tool run bundle && npm run unit-test:browser && npm run integration-test", - "unit-test": "npm run unit-test:node && npm run unit-test:browser", - "unit-test:browser": "karma start --single-run", - "unit-test:node": "cross-env TS_NODE_FILES=true mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 500000 --full-trace --exclude \"test/**/*.browser.ts\" \"test/**/*.ts\"" - }, - "sideEffects": false, - "nyc": { - "extension": [ - ".ts" - ], - "exclude": [ - "coverage/**/*", - "**/*.d.ts", - "**/*.js" - ], - "reporter": [ - "text", - "html", - "cobertura" - ], - "all": true - }, - "//metadata": { - "constantPaths": [ - { - "path": "src/util/constants.ts", - "prefix": "coreHttpVersion" - } - ] - }, - "dependencies": { - "@azure/abort-controller": "^1.0.0", - "@azure/core-auth": "^1.3.0", - "@azure/core-tracing": "1.0.0-preview.13", - "@azure/core-util": "^1.1.1", - "@azure/logger": "^1.0.0", - "@types/node-fetch": "^2.5.0", - "@types/tunnel": "^0.0.3", - "form-data": "^4.0.0", - "node-fetch": "^2.6.7", - "process": "^0.11.10", - "tslib": "^2.2.0", - "tunnel": "^0.0.6", - "uuid": "^8.3.0", - "xml2js": "^0.5.0" - }, - "devDependencies": { - "@azure/eslint-plugin-azure-sdk": "^3.0.0", - "@azure/dev-tool": "^1.0.0", - "@azure/logger-js": "^1.0.2", - "@azure/test-utils": "^1.0.0", - "@microsoft/api-extractor": "^7.31.1", - "@opentelemetry/api": "^1.4.0", - "@types/chai": "^4.1.6", - "@types/express": "^4.16.0", - "@types/mocha": "^7.0.2", - "@types/node": "^14.0.0", - "@types/sinon": "^9.0.4", - "@types/trusted-types": "^2.0.0", - "@types/uuid": "^8.0.0", - "@types/xml2js": "^0.4.11", - "chai": "^4.2.0", - "cross-env": "^7.0.2", - "downlevel-dts": "^0.10.0", - "eslint": "^8.0.0", - "express": "^4.16.3", - "fetch-mock": "^9.10.1", - "karma": "^6.2.0", - "karma-chai": "^0.1.0", - "karma-chrome-launcher": "^3.0.0", - 
"karma-edge-launcher": "^0.4.2", - "karma-firefox-launcher": "^1.1.0", - "karma-mocha": "^2.0.1", - "karma-sourcemap-loader": "^0.3.8", - "mocha": "^7.1.1", - "mocha-junit-reporter": "^2.0.0", - "npm-run-all": "^4.1.5", - "nyc": "^15.0.0", - "prettier": "^2.5.1", - "puppeteer": "^19.2.2", - "regenerator-runtime": "^0.13.3", - "rimraf": "^3.0.0", - "shx": "^0.3.2", - "sinon": "^9.0.2", - "ts-node": "^10.0.0", - "typescript": "~4.8.0", - "uglify-js": "^3.4.9", - "xhr-mock": "^2.4.1" - } -} diff --git a/node_modules/@azure/core-lro/dist/browser/http/operation.js b/node_modules/@azure/core-lro/dist/browser/http/operation.js index 038384478..e31ca08c9 100644 --- a/node_modules/@azure/core-lro/dist/browser/http/operation.js +++ b/node_modules/@azure/core-lro/dist/browser/http/operation.js @@ -4,7 +4,7 @@ import { initOperation, pollOperation } from "../poller/operation.js"; import { logger } from "../logger.js"; function getOperationLocationPollingUrl(inputs) { const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation ?? azureAsyncOperation; + return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; } function getLocationHeader(rawResponse) { return rawResponse.headers["location"]; @@ -16,6 +16,7 @@ function getAzureAsyncOperationHeader(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } function findResourceLocation(inputs) { + var _a; const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; switch (requestMethod) { case "PUT": { @@ -25,7 +26,7 @@ function findResourceLocation(inputs) { return undefined; } case "PATCH": { - return getDefault() ?? requestPath; + return (_a = getDefault()) !== null && _a !== void 0 ? 
_a : requestPath; } default: { return getDefault(); @@ -52,7 +53,7 @@ export function inferLroMode(inputs) { const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); if (pollingUrl !== undefined) { return { mode: "OperationLocation", @@ -86,7 +87,7 @@ function transformStatus(inputs) { if (typeof status !== "string" && status !== undefined) { throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } - switch (status?.toLocaleLowerCase()) { + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: return toOperationStatus(statusCode); case "succeeded": @@ -109,12 +110,14 @@ function transformStatus(inputs) { } } function getStatus(rawResponse) { - const { status } = rawResponse.body ?? {}; + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { - const { properties, provisioningState } = rawResponse.body ?? {}; - const status = properties?.provisioningState ?? provisioningState; + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? 
_b : provisioningState; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { @@ -162,7 +165,8 @@ function calculatePollingIntervalFromDate(retryAfterDate) { export function getStatusFromInitialResponse(inputs) { const { response, state, operationLocation } = inputs; function helper() { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case undefined: return toOperationStatus(response.rawResponse.statusCode); @@ -189,12 +193,7 @@ export async function initHttpOperation(inputs) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? { metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); }, stateProxy, processResult: processResult @@ -205,7 +204,8 @@ export async function initHttpOperation(inputs) { }); } export function getOperationLocation({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getOperationLocationPollingUrl({ @@ -223,7 +223,8 @@ export function getOperationLocation({ rawResponse }, state) { } } export function getOperationStatus({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getStatus(rawResponse); @@ -239,7 +240,8 @@ export function getOperationStatus({ rawResponse }, state) { } } function accessBodyProperty({ flatResponse, rawResponse }, prop) { - return flatResponse?.[prop] ?? rawResponse.body?.[prop]; + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; } export function getResourceLocation(res, state) { const loc = accessBodyProperty(res, "resourceLocation"); diff --git a/node_modules/@azure/core-lro/dist/browser/http/poller.js b/node_modules/@azure/core-lro/dist/browser/http/poller.js index 84379fdd7..ce3aa2b79 100644 --- a/node_modules/@azure/core-lro/dist/browser/http/poller.js +++ b/node_modules/@azure/core-lro/dist/browser/http/poller.js @@ -28,12 +28,7 @@ export async function createHttpPoller(lro, options) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? { metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? 
{ metadata: { mode: config.mode } } : {})); }, poll: lro.sendPollRequest, }, { diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js index 351691a7b..7d247af7e 100644 --- a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/lroEngine.js @@ -8,7 +8,6 @@ import { deserializeState } from "../../poller/operation.js"; * The LRO Engine, a class that performs polling. */ export class LroEngine extends Poller { - config; constructor(lro, options) { const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js index 4051ad80d..a073fb044 100644 --- a/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js +++ b/node_modules/@azure/core-lro/dist/browser/legacy/lroEngine/operation.js @@ -20,14 +20,6 @@ const createStateProxy = () => ({ isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); export class GenericPollOperation { - state; - lro; - setErrorAsResult; - lroResourceLocationConfig; - processResult; - updateState; - isDone; - pollerConfig; constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = lro; @@ -41,18 +33,16 @@ export class GenericPollOperation { this.pollerConfig = pollerConfig; } async update(options) { + var _a; const stateProxy = createStateProxy(); if (!this.state.isStarted) { - this.state = { - ...this.state, - ...(await initHttpOperation({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: 
this.processResult, - setErrorAsResult: this.setErrorAsResult, - })), - }; + this.state = Object.assign(Object.assign({}, this.state), (await initHttpOperation({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + }))); } const updateState = this.updateState; const isDone = this.isDone; @@ -75,7 +65,7 @@ export class GenericPollOperation { setErrorAsResult: this.setErrorAsResult, }); } - options?.fireProgress?.(this.state); + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); return this; } async cancel() { diff --git a/node_modules/@azure/core-lro/dist/browser/legacy/poller.js b/node_modules/@azure/core-lro/dist/browser/legacy/poller.js index de6f912e6..4335b2fb1 100644 --- a/node_modules/@azure/core-lro/dist/browser/legacy/poller.js +++ b/node_modules/@azure/core-lro/dist/browser/legacy/poller.js @@ -85,20 +85,6 @@ export class PollerCancelledError extends Error { */ // eslint-disable-next-line no-use-before-define export class Poller { - /** controls whether to throw an error if the operation failed or was canceled. */ - resolveOnUnsuccessful = false; - stopped = true; - resolve; - reject; - pollOncePromise; - cancelPromise; - promise; - pollProgressCallbacks = []; - /** - * The poller's operation is available in full to any of the methods of the Poller class - * and any class extending the Poller class. - */ - operation; /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * @@ -165,6 +151,10 @@ export class Poller { * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. 
*/ + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; this.operation = operation; this.promise = new Promise((resolve, reject) => { this.resolve = resolve; diff --git a/node_modules/@azure/core-lro/dist/browser/poller/operation.js b/node_modules/@azure/core-lro/dist/browser/poller/operation.js index 869eaa528..90996db36 100644 --- a/node_modules/@azure/core-lro/dist/browser/poller/operation.js +++ b/node_modules/@azure/core-lro/dist/browser/poller/operation.js @@ -52,7 +52,7 @@ function processOperationStatus(result) { break; } case "failed": { - const err = getError?.(response); + const err = getError === null || getError === void 0 ? void 0 : getError(response); let postfix = ""; if (err) { const { code, message } = simplifyError(err); @@ -69,7 +69,7 @@ function processOperationStatus(result) { break; } } - if (isDone?.(response, state) || + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || (isDone === undefined && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { stateProxy.setResult(state, buildResult({ @@ -90,7 +90,7 @@ export async function initOperation(inputs) { const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); const config = { metadata, operationLocation, @@ -148,19 +148,19 @@ export async function pollOperation(inputs) { setErrorAsResult, }); if (!terminalStates.includes(status)) { - const intervalInMs = getPollingInterval?.(response); + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? 
void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); - const location = getOperationLocation?.(response, state); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); if (location !== undefined) { const isUpdated = operationLocation !== location; state.config.operationLocation = location; - withOperationLocation?.(location, isUpdated); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); } else - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); } - updateState?.(state, response); + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); } } //# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/browser/poller/poller.js b/node_modules/@azure/core-lro/dist/browser/poller/poller.js index 5fc9b09df..4579ce70c 100644 --- a/node_modules/@azure/core-lro/dist/browser/poller/poller.js +++ b/node_modules/@azure/core-lro/dist/browser/poller/poller.js @@ -75,18 +75,18 @@ export function buildCreatePoller(inputs) { handlers.set(s, callback); return () => handlers.delete(s); }, - pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? resultPromise : (resultPromise = (async () => { const { abortSignal: inputAbortSignal } = pollOptions || {}; // In the future we can use AbortSignal.any() instead function abortListener() { abortController.abort(); } const abortSignal = abortController.signal; - if (inputAbortSignal?.aborted) { + if (inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.aborted) { abortController.abort(); } else if (!abortSignal.aborted) { - inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); } try { if (!poller.isDone()) { @@ -98,7 +98,7 @@ export function buildCreatePoller(inputs) { } } finally { - inputAbortSignal?.removeEventListener("abort", abortListener); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); } if (resolveOnUnsuccessful) { return poller.getResult(); @@ -118,7 +118,7 @@ export function buildCreatePoller(inputs) { } })().finally(() => { resultPromise = undefined; - })), + }))), async poll(pollOptions) { if (resolveOnUnsuccessful) { if (poller.isDone()) diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/operation.js b/node_modules/@azure/core-lro/dist/commonjs/http/operation.js index c3fc6eddb..94786aa74 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/http/operation.js +++ b/node_modules/@azure/core-lro/dist/commonjs/http/operation.js @@ -7,7 +7,7 @@ const operation_js_1 = require("../poller/operation.js"); const logger_js_1 = require("../logger.js"); function getOperationLocationPollingUrl(inputs) { const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation ?? azureAsyncOperation; + return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; } function getLocationHeader(rawResponse) { return rawResponse.headers["location"]; @@ -19,6 +19,7 @@ function getAzureAsyncOperationHeader(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } function findResourceLocation(inputs) { + var _a; const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; switch (requestMethod) { case "PUT": { @@ -28,7 +29,7 @@ function findResourceLocation(inputs) { return undefined; } case "PATCH": { - return getDefault() ?? requestPath; + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; } default: { return getDefault(); @@ -55,7 +56,7 @@ function inferLroMode(inputs) { const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); if (pollingUrl !== undefined) { return { mode: "OperationLocation", @@ -90,7 +91,7 @@ function transformStatus(inputs) { if (typeof status !== "string" && status !== undefined) { throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } - switch (status?.toLocaleLowerCase()) { + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: return toOperationStatus(statusCode); case "succeeded": @@ -113,12 +114,14 @@ function transformStatus(inputs) { } } function getStatus(rawResponse) { - const { status } = rawResponse.body ?? {}; + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? 
_a : {}; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { - const { properties, provisioningState } = rawResponse.body ?? {}; - const status = properties?.provisioningState ?? provisioningState; + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { @@ -168,7 +171,8 @@ function calculatePollingIntervalFromDate(retryAfterDate) { function getStatusFromInitialResponse(inputs) { const { response, state, operationLocation } = inputs; function helper() { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case undefined: return toOperationStatus(response.rawResponse.statusCode); @@ -196,12 +200,7 @@ async function initHttpOperation(inputs) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? { metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? 
{ metadata: { mode: config.mode } } : {})); }, stateProxy, processResult: processResult @@ -213,7 +212,8 @@ async function initHttpOperation(inputs) { } exports.initHttpOperation = initHttpOperation; function getOperationLocation({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getOperationLocationPollingUrl({ @@ -232,7 +232,8 @@ function getOperationLocation({ rawResponse }, state) { } exports.getOperationLocation = getOperationLocation; function getOperationStatus({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getStatus(rawResponse); @@ -249,7 +250,8 @@ function getOperationStatus({ rawResponse }, state) { } exports.getOperationStatus = getOperationStatus; function accessBodyProperty({ flatResponse, rawResponse }, prop) { - return flatResponse?.[prop] ?? rawResponse.body?.[prop]; + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? 
void 0 : _b[prop]; } function getResourceLocation(res, state) { const loc = accessBodyProperty(res, "resourceLocation"); diff --git a/node_modules/@azure/core-lro/dist/commonjs/http/poller.js b/node_modules/@azure/core-lro/dist/commonjs/http/poller.js index 52d6606cb..94c8b98ed 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/http/poller.js +++ b/node_modules/@azure/core-lro/dist/commonjs/http/poller.js @@ -31,12 +31,7 @@ async function createHttpPoller(lro, options) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? { metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); }, poll: lro.sendPollRequest, }, { diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js index ed5cd5709..eaf209fcc 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/lroEngine.js @@ -11,7 +11,6 @@ const operation_js_2 = require("../../poller/operation.js"); * The LRO Engine, a class that performs polling. 
*/ class LroEngine extends poller_js_1.Poller { - config; constructor(lro, options) { const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js index 16b75c76c..44c935ac4 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/lroEngine/operation.js @@ -23,14 +23,6 @@ const createStateProxy = () => ({ isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); class GenericPollOperation { - state; - lro; - setErrorAsResult; - lroResourceLocationConfig; - processResult; - updateState; - isDone; - pollerConfig; constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = lro; @@ -44,18 +36,16 @@ class GenericPollOperation { this.pollerConfig = pollerConfig; } async update(options) { + var _a; const stateProxy = createStateProxy(); if (!this.state.isStarted) { - this.state = { - ...this.state, - ...(await (0, operation_js_1.initHttpOperation)({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult, - })), - }; + this.state = Object.assign(Object.assign({}, this.state), (await (0, operation_js_1.initHttpOperation)({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + }))); } const updateState = this.updateState; const isDone = this.isDone; @@ -78,7 +68,7 @@ class GenericPollOperation { setErrorAsResult: this.setErrorAsResult, }); } - 
options?.fireProgress?.(this.state); + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); return this; } async cancel() { diff --git a/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js index 1e27a020e..b7964a985 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js +++ b/node_modules/@azure/core-lro/dist/commonjs/legacy/poller.js @@ -90,20 +90,6 @@ exports.PollerCancelledError = PollerCancelledError; */ // eslint-disable-next-line no-use-before-define class Poller { - /** controls whether to throw an error if the operation failed or was canceled. */ - resolveOnUnsuccessful = false; - stopped = true; - resolve; - reject; - pollOncePromise; - cancelPromise; - promise; - pollProgressCallbacks = []; - /** - * The poller's operation is available in full to any of the methods of the Poller class - * and any class extending the Poller class. - */ - operation; /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * @@ -170,6 +156,10 @@ class Poller { * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. 
*/ + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; this.operation = operation; this.promise = new Promise((resolve, reject) => { this.resolve = resolve; diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js index d050c61e0..55b1673ab 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/operation.js @@ -56,7 +56,7 @@ function processOperationStatus(result) { break; } case "failed": { - const err = getError?.(response); + const err = getError === null || getError === void 0 ? void 0 : getError(response); let postfix = ""; if (err) { const { code, message } = simplifyError(err); @@ -73,7 +73,7 @@ function processOperationStatus(result) { break; } } - if (isDone?.(response, state) || + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || (isDone === undefined && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { stateProxy.setResult(state, buildResult({ @@ -94,7 +94,7 @@ async function initOperation(inputs) { const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); const config = { metadata, operationLocation, @@ -153,19 +153,19 @@ async function pollOperation(inputs) { setErrorAsResult, }); if (!constants_js_1.terminalStates.includes(status)) { - const intervalInMs = getPollingInterval?.(response); + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? 
void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); - const location = getOperationLocation?.(response, state); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); if (location !== undefined) { const isUpdated = operationLocation !== location; state.config.operationLocation = location; - withOperationLocation?.(location, isUpdated); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); } else - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); } - updateState?.(state, response); + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); } } exports.pollOperation = pollOperation; diff --git a/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js index 8df8b88e5..776b87840 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js +++ b/node_modules/@azure/core-lro/dist/commonjs/poller/poller.js @@ -78,18 +78,18 @@ function buildCreatePoller(inputs) { handlers.set(s, callback); return () => handlers.delete(s); }, - pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? resultPromise : (resultPromise = (async () => { const { abortSignal: inputAbortSignal } = pollOptions || {}; // In the future we can use AbortSignal.any() instead function abortListener() { abortController.abort(); } const abortSignal = abortController.signal; - if (inputAbortSignal?.aborted) { + if (inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.aborted) { abortController.abort(); } else if (!abortSignal.aborted) { - inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); } try { if (!poller.isDone()) { @@ -101,7 +101,7 @@ function buildCreatePoller(inputs) { } } finally { - inputAbortSignal?.removeEventListener("abort", abortListener); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); } if (resolveOnUnsuccessful) { return poller.getResult(); @@ -121,7 +121,7 @@ function buildCreatePoller(inputs) { } })().finally(() => { resultPromise = undefined; - })), + }))), async poll(pollOptions) { if (resolveOnUnsuccessful) { if (poller.isDone()) diff --git a/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json index 22735db17..6305f1798 100644 --- a/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json +++ b/node_modules/@azure/core-lro/dist/commonjs/tsdoc-metadata.json @@ -5,7 +5,7 @@ "toolPackages": [ { "packageName": "@microsoft/api-extractor", - "packageVersion": "7.42.3" + "packageVersion": "7.43.1" } ] } diff --git a/node_modules/@azure/core-lro/dist/esm/http/operation.js b/node_modules/@azure/core-lro/dist/esm/http/operation.js index 038384478..e31ca08c9 100644 --- a/node_modules/@azure/core-lro/dist/esm/http/operation.js +++ b/node_modules/@azure/core-lro/dist/esm/http/operation.js @@ -4,7 +4,7 @@ import { initOperation, pollOperation } from "../poller/operation.js"; import { logger } from "../logger.js"; function getOperationLocationPollingUrl(inputs) { const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation ?? azureAsyncOperation; + return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; } function getLocationHeader(rawResponse) { return rawResponse.headers["location"]; @@ -16,6 +16,7 @@ function getAzureAsyncOperationHeader(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } function findResourceLocation(inputs) { + var _a; const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; switch (requestMethod) { case "PUT": { @@ -25,7 +26,7 @@ function findResourceLocation(inputs) { return undefined; } case "PATCH": { - return getDefault() ?? requestPath; + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; } default: { return getDefault(); @@ -52,7 +53,7 @@ export function inferLroMode(inputs) { const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); if (pollingUrl !== undefined) { return { mode: "OperationLocation", @@ -86,7 +87,7 @@ function transformStatus(inputs) { if (typeof status !== "string" && status !== undefined) { throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } - switch (status?.toLocaleLowerCase()) { + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: return toOperationStatus(statusCode); case "succeeded": @@ -109,12 +110,14 @@ function transformStatus(inputs) { } } function getStatus(rawResponse) { - const { status } = rawResponse.body ?? {}; + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? 
_a : {}; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { - const { properties, provisioningState } = rawResponse.body ?? {}; - const status = properties?.provisioningState ?? provisioningState; + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { @@ -162,7 +165,8 @@ function calculatePollingIntervalFromDate(retryAfterDate) { export function getStatusFromInitialResponse(inputs) { const { response, state, operationLocation } = inputs; function helper() { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case undefined: return toOperationStatus(response.rawResponse.statusCode); @@ -189,12 +193,7 @@ export async function initHttpOperation(inputs) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? { metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? 
{ metadata: { mode: config.mode } } : {})); }, stateProxy, processResult: processResult @@ -205,7 +204,8 @@ export async function initHttpOperation(inputs) { }); } export function getOperationLocation({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getOperationLocationPollingUrl({ @@ -223,7 +223,8 @@ export function getOperationLocation({ rawResponse }, state) { } } export function getOperationStatus({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getStatus(rawResponse); @@ -239,7 +240,8 @@ export function getOperationStatus({ rawResponse }, state) { } } function accessBodyProperty({ flatResponse, rawResponse }, prop) { - return flatResponse?.[prop] ?? rawResponse.body?.[prop]; + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; } export function getResourceLocation(res, state) { const loc = accessBodyProperty(res, "resourceLocation"); diff --git a/node_modules/@azure/core-lro/dist/esm/http/poller.js b/node_modules/@azure/core-lro/dist/esm/http/poller.js index 84379fdd7..ce3aa2b79 100644 --- a/node_modules/@azure/core-lro/dist/esm/http/poller.js +++ b/node_modules/@azure/core-lro/dist/esm/http/poller.js @@ -28,12 +28,7 @@ export async function createHttpPoller(lro, options) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? 
{ metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); }, poll: lro.sendPollRequest, }, { diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js index 351691a7b..7d247af7e 100644 --- a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/lroEngine.js @@ -8,7 +8,6 @@ import { deserializeState } from "../../poller/operation.js"; * The LRO Engine, a class that performs polling. */ export class LroEngine extends Poller { - config; constructor(lro, options) { const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js index 4051ad80d..a073fb044 100644 --- a/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js +++ b/node_modules/@azure/core-lro/dist/esm/legacy/lroEngine/operation.js @@ -20,14 +20,6 @@ const createStateProxy = () => ({ isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); export class GenericPollOperation { - state; - lro; - setErrorAsResult; - lroResourceLocationConfig; - processResult; - updateState; - isDone; - pollerConfig; constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = lro; @@ -41,18 +33,16 @@ export class GenericPollOperation { 
this.pollerConfig = pollerConfig; } async update(options) { + var _a; const stateProxy = createStateProxy(); if (!this.state.isStarted) { - this.state = { - ...this.state, - ...(await initHttpOperation({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult, - })), - }; + this.state = Object.assign(Object.assign({}, this.state), (await initHttpOperation({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + }))); } const updateState = this.updateState; const isDone = this.isDone; @@ -75,7 +65,7 @@ export class GenericPollOperation { setErrorAsResult: this.setErrorAsResult, }); } - options?.fireProgress?.(this.state); + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); return this; } async cancel() { diff --git a/node_modules/@azure/core-lro/dist/esm/legacy/poller.js b/node_modules/@azure/core-lro/dist/esm/legacy/poller.js index de6f912e6..4335b2fb1 100644 --- a/node_modules/@azure/core-lro/dist/esm/legacy/poller.js +++ b/node_modules/@azure/core-lro/dist/esm/legacy/poller.js @@ -85,20 +85,6 @@ export class PollerCancelledError extends Error { */ // eslint-disable-next-line no-use-before-define export class Poller { - /** controls whether to throw an error if the operation failed or was canceled. */ - resolveOnUnsuccessful = false; - stopped = true; - resolve; - reject; - pollOncePromise; - cancelPromise; - promise; - pollProgressCallbacks = []; - /** - * The poller's operation is available in full to any of the methods of the Poller class - * and any class extending the Poller class. - */ - operation; /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. 
* @@ -165,6 +151,10 @@ export class Poller { * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. */ + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; this.operation = operation; this.promise = new Promise((resolve, reject) => { this.resolve = resolve; diff --git a/node_modules/@azure/core-lro/dist/esm/poller/operation.js b/node_modules/@azure/core-lro/dist/esm/poller/operation.js index 869eaa528..90996db36 100644 --- a/node_modules/@azure/core-lro/dist/esm/poller/operation.js +++ b/node_modules/@azure/core-lro/dist/esm/poller/operation.js @@ -52,7 +52,7 @@ function processOperationStatus(result) { break; } case "failed": { - const err = getError?.(response); + const err = getError === null || getError === void 0 ? void 0 : getError(response); let postfix = ""; if (err) { const { code, message } = simplifyError(err); @@ -69,7 +69,7 @@ function processOperationStatus(result) { break; } } - if (isDone?.(response, state) || + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || (isDone === undefined && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { stateProxy.setResult(state, buildResult({ @@ -90,7 +90,7 @@ export async function initOperation(inputs) { const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); const config = { metadata, operationLocation, @@ -148,19 +148,19 @@ export async function pollOperation(inputs) { setErrorAsResult, }); if (!terminalStates.includes(status)) { - const intervalInMs = getPollingInterval?.(response); + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); - const location = getOperationLocation?.(response, state); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); if (location !== undefined) { const isUpdated = operationLocation !== location; state.config.operationLocation = location; - withOperationLocation?.(location, isUpdated); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); } else - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); } - updateState?.(state, response); + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); } } //# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/esm/poller/poller.js b/node_modules/@azure/core-lro/dist/esm/poller/poller.js index 5fc9b09df..4579ce70c 100644 --- a/node_modules/@azure/core-lro/dist/esm/poller/poller.js +++ b/node_modules/@azure/core-lro/dist/esm/poller/poller.js @@ -75,18 +75,18 @@ export function buildCreatePoller(inputs) { handlers.set(s, callback); return () => handlers.delete(s); }, - pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? 
resultPromise : (resultPromise = (async () => { const { abortSignal: inputAbortSignal } = pollOptions || {}; // In the future we can use AbortSignal.any() instead function abortListener() { abortController.abort(); } const abortSignal = abortController.signal; - if (inputAbortSignal?.aborted) { + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { abortController.abort(); } else if (!abortSignal.aborted) { - inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); } try { if (!poller.isDone()) { @@ -98,7 +98,7 @@ export function buildCreatePoller(inputs) { } } finally { - inputAbortSignal?.removeEventListener("abort", abortListener); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); } if (resolveOnUnsuccessful) { return poller.getResult(); @@ -118,7 +118,7 @@ export function buildCreatePoller(inputs) { } })().finally(() => { resultPromise = undefined; - })), + }))), async poll(pollOptions) { if (resolveOnUnsuccessful) { if (poller.isDone()) diff --git a/node_modules/@azure/core-lro/dist/react-native/http/operation.js b/node_modules/@azure/core-lro/dist/react-native/http/operation.js index 038384478..e31ca08c9 100644 --- a/node_modules/@azure/core-lro/dist/react-native/http/operation.js +++ b/node_modules/@azure/core-lro/dist/react-native/http/operation.js @@ -4,7 +4,7 @@ import { initOperation, pollOperation } from "../poller/operation.js"; import { logger } from "../logger.js"; function getOperationLocationPollingUrl(inputs) { const { azureAsyncOperation, operationLocation } = inputs; - return operationLocation ?? azureAsyncOperation; + return operationLocation !== null && operationLocation !== void 0 ? 
operationLocation : azureAsyncOperation; } function getLocationHeader(rawResponse) { return rawResponse.headers["location"]; @@ -16,6 +16,7 @@ function getAzureAsyncOperationHeader(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } function findResourceLocation(inputs) { + var _a; const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; switch (requestMethod) { case "PUT": { @@ -25,7 +26,7 @@ function findResourceLocation(inputs) { return undefined; } case "PATCH": { - return getDefault() ?? requestPath; + return (_a = getDefault()) !== null && _a !== void 0 ? _a : requestPath; } default: { return getDefault(); @@ -52,7 +53,7 @@ export function inferLroMode(inputs) { const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); const location = getLocationHeader(rawResponse); - const normalizedRequestMethod = requestMethod?.toLocaleUpperCase(); + const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); if (pollingUrl !== undefined) { return { mode: "OperationLocation", @@ -86,7 +87,7 @@ function transformStatus(inputs) { if (typeof status !== "string" && status !== undefined) { throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } - switch (status?.toLocaleLowerCase()) { + switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: return toOperationStatus(statusCode); case "succeeded": @@ -109,12 +110,14 @@ function transformStatus(inputs) { } } function getStatus(rawResponse) { - const { status } = rawResponse.body ?? {}; + var _a; + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? 
_a : {}; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { - const { properties, provisioningState } = rawResponse.body ?? {}; - const status = properties?.provisioningState ?? provisioningState; + var _a, _b; + const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { @@ -162,7 +165,8 @@ function calculatePollingIntervalFromDate(retryAfterDate) { export function getStatusFromInitialResponse(inputs) { const { response, state, operationLocation } = inputs; function helper() { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case undefined: return toOperationStatus(response.rawResponse.statusCode); @@ -189,12 +193,7 @@ export async function initHttpOperation(inputs) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? { metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? 
{ metadata: { mode: config.mode } } : {})); }, stateProxy, processResult: processResult @@ -205,7 +204,8 @@ export async function initHttpOperation(inputs) { }); } export function getOperationLocation({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getOperationLocationPollingUrl({ @@ -223,7 +223,8 @@ export function getOperationLocation({ rawResponse }, state) { } } export function getOperationStatus({ rawResponse }, state) { - const mode = state.config.metadata?.["mode"]; + var _a; + const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getStatus(rawResponse); @@ -239,7 +240,8 @@ export function getOperationStatus({ rawResponse }, state) { } } function accessBodyProperty({ flatResponse, rawResponse }, prop) { - return flatResponse?.[prop] ?? rawResponse.body?.[prop]; + var _a, _b; + return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; } export function getResourceLocation(res, state) { const loc = accessBodyProperty(res, "resourceLocation"); diff --git a/node_modules/@azure/core-lro/dist/react-native/http/poller.js b/node_modules/@azure/core-lro/dist/react-native/http/poller.js index 84379fdd7..ce3aa2b79 100644 --- a/node_modules/@azure/core-lro/dist/react-native/http/poller.js +++ b/node_modules/@azure/core-lro/dist/react-native/http/poller.js @@ -28,12 +28,7 @@ export async function createHttpPoller(lro, options) { requestMethod: lro.requestMethod, resourceLocationConfig, }); - return { - response, - operationLocation: config?.operationLocation, - resourceLocation: config?.resourceLocation, - ...(config?.mode ? 
{ metadata: { mode: config.mode } } : {}), - }; + return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); }, poll: lro.sendPollRequest, }, { diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js index 351691a7b..7d247af7e 100644 --- a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/lroEngine.js @@ -8,7 +8,6 @@ import { deserializeState } from "../../poller/operation.js"; * The LRO Engine, a class that performs polling. */ export class LroEngine extends Poller { - config; constructor(lro, options) { const { intervalInMs = POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js index 4051ad80d..a073fb044 100644 --- a/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/lroEngine/operation.js @@ -20,14 +20,6 @@ const createStateProxy = () => ({ isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); export class GenericPollOperation { - state; - lro; - setErrorAsResult; - lroResourceLocationConfig; - processResult; - updateState; - isDone; - pollerConfig; constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = 
lro; @@ -41,18 +33,16 @@ export class GenericPollOperation { this.pollerConfig = pollerConfig; } async update(options) { + var _a; const stateProxy = createStateProxy(); if (!this.state.isStarted) { - this.state = { - ...this.state, - ...(await initHttpOperation({ - lro: this.lro, - stateProxy, - resourceLocationConfig: this.lroResourceLocationConfig, - processResult: this.processResult, - setErrorAsResult: this.setErrorAsResult, - })), - }; + this.state = Object.assign(Object.assign({}, this.state), (await initHttpOperation({ + lro: this.lro, + stateProxy, + resourceLocationConfig: this.lroResourceLocationConfig, + processResult: this.processResult, + setErrorAsResult: this.setErrorAsResult, + }))); } const updateState = this.updateState; const isDone = this.isDone; @@ -75,7 +65,7 @@ export class GenericPollOperation { setErrorAsResult: this.setErrorAsResult, }); } - options?.fireProgress?.(this.state); + (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); return this; } async cancel() { diff --git a/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js index de6f912e6..4335b2fb1 100644 --- a/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js +++ b/node_modules/@azure/core-lro/dist/react-native/legacy/poller.js @@ -85,20 +85,6 @@ export class PollerCancelledError extends Error { */ // eslint-disable-next-line no-use-before-define export class Poller { - /** controls whether to throw an error if the operation failed or was canceled. */ - resolveOnUnsuccessful = false; - stopped = true; - resolve; - reject; - pollOncePromise; - cancelPromise; - promise; - pollProgressCallbacks = []; - /** - * The poller's operation is available in full to any of the methods of the Poller class - * and any class extending the Poller class. 
- */ - operation; /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * @@ -165,6 +151,10 @@ export class Poller { * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { + /** controls whether to throw an error if the operation failed or was canceled. */ + this.resolveOnUnsuccessful = false; + this.stopped = true; + this.pollProgressCallbacks = []; this.operation = operation; this.promise = new Promise((resolve, reject) => { this.resolve = resolve; diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/operation.js b/node_modules/@azure/core-lro/dist/react-native/poller/operation.js index 869eaa528..90996db36 100644 --- a/node_modules/@azure/core-lro/dist/react-native/poller/operation.js +++ b/node_modules/@azure/core-lro/dist/react-native/poller/operation.js @@ -52,7 +52,7 @@ function processOperationStatus(result) { break; } case "failed": { - const err = getError?.(response); + const err = getError === null || getError === void 0 ? void 0 : getError(response); let postfix = ""; if (err) { const { code, message } = simplifyError(err); @@ -69,7 +69,7 @@ function processOperationStatus(result) { break; } } - if (isDone?.(response, state) || + if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || (isDone === undefined && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { stateProxy.setResult(state, buildResult({ @@ -90,7 +90,7 @@ export async function initOperation(inputs) { const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); const config = { metadata, operationLocation, @@ -148,19 +148,19 @@ export async function pollOperation(inputs) { setErrorAsResult, }); if (!terminalStates.includes(status)) { - const intervalInMs = getPollingInterval?.(response); + const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); - const location = getOperationLocation?.(response, state); + const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); if (location !== undefined) { const isUpdated = operationLocation !== location; state.config.operationLocation = location; - withOperationLocation?.(location, isUpdated); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); } else - withOperationLocation?.(operationLocation, false); + withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); } - updateState?.(state, response); + updateState === null || updateState === void 0 ? void 0 : updateState(state, response); } } //# sourceMappingURL=operation.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-lro/dist/react-native/poller/poller.js b/node_modules/@azure/core-lro/dist/react-native/poller/poller.js index 5fc9b09df..4579ce70c 100644 --- a/node_modules/@azure/core-lro/dist/react-native/poller/poller.js +++ b/node_modules/@azure/core-lro/dist/react-native/poller/poller.js @@ -75,18 +75,18 @@ export function buildCreatePoller(inputs) { handlers.set(s, callback); return () => handlers.delete(s); }, - pollUntilDone: (pollOptions) => (resultPromise ??= (async () => { + pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? 
resultPromise : (resultPromise = (async () => { const { abortSignal: inputAbortSignal } = pollOptions || {}; // In the future we can use AbortSignal.any() instead function abortListener() { abortController.abort(); } const abortSignal = abortController.signal; - if (inputAbortSignal?.aborted) { + if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { abortController.abort(); } else if (!abortSignal.aborted) { - inputAbortSignal?.addEventListener("abort", abortListener, { once: true }); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); } try { if (!poller.isDone()) { @@ -98,7 +98,7 @@ export function buildCreatePoller(inputs) { } } finally { - inputAbortSignal?.removeEventListener("abort", abortListener); + inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.removeEventListener("abort", abortListener); } if (resolveOnUnsuccessful) { return poller.getResult(); @@ -118,7 +118,7 @@ export function buildCreatePoller(inputs) { } })().finally(() => { resultPromise = undefined; - })), + }))), async poll(pollOptions) { if (resolveOnUnsuccessful) { if (poller.isDone()) diff --git a/node_modules/@azure/core-lro/package.json b/node_modules/@azure/core-lro/package.json index ac10fd5df..ee2ad8e3c 100644 --- a/node_modules/@azure/core-lro/package.json +++ b/node_modules/@azure/core-lro/package.json @@ -3,7 +3,7 @@ "author": "Microsoft Corporation", "sdk-type": "client", "type": "module", - "version": "2.7.1", + "version": "2.7.2", "description": "Isomorphic client library for supporting long-running operations in node.js and browser.", "exports": { "./package.json": "./package.json", @@ -104,7 +104,7 @@ "playwright": "^1.41.2", "prettier": "^3.2.5", "rimraf": "^5.0.5", - "tshy": "^1.11.1", + "tshy": "^1.13.0", "typescript": "~5.3.3", "vitest": "^1.3.1" }, diff --git 
a/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js index d9db4b998..f21132b34 100644 --- a/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js +++ b/node_modules/@azure/core-paging/dist/browser/getPagedAsyncIterator.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; /** * returns an async iterator that iterates over results. It also has a `byPage` * method that returns pages of items at once. @@ -8,6 +9,7 @@ * @returns a paged async iterator that iterates over results. */ export function getPagedAsyncIterator(pagedResult) { + var _a; const iter = getItemAsyncIterator(pagedResult); return { next() { @@ -16,57 +18,85 @@ export function getPagedAsyncIterator(pagedResult) { [Symbol.asyncIterator]() { return this; }, - byPage: pagedResult?.byPage ?? - ((settings) => { - const { continuationToken, maxPageSize } = settings ?? {}; - return getPageAsyncIterator(pagedResult, { - pageLink: continuationToken, - maxPageSize, - }); - }), + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), }; } -async function* getItemAsyncIterator(pagedResult) { - const pages = getPageAsyncIterator(pagedResult); - const firstVal = await pages.next(); - // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is - if (!Array.isArray(firstVal.value)) { - // can extract elements from this page - const { toElements } = pagedResult; - if (toElements) { - yield* toElements(firstVal.value); - for await (const page of pages) { - yield* toElements(page); +function getItemAsyncIterator(pagedResult) { + return __asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var _a, e_1, _b, _c, _d, e_2, _e, _f; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield __await(pages.next()); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield __await(yield* __asyncDelegator(__asyncValues(toElements(firstVal.value)))); + try { + for (var _g = true, pages_1 = __asyncValues(pages), pages_1_1; pages_1_1 = yield __await(pages_1.next()), _a = pages_1_1.done, !_a; _g = true) { + _c = pages_1_1.value; + _g = false; + const page = _c; + yield __await(yield* __asyncDelegator(__asyncValues(toElements(page)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_g && !_a && (_b = pages_1.return)) yield __await(_b.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + else { + yield yield __await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield __await(yield* __asyncDelegator(__asyncValues(pages))); } } else { - yield firstVal.value; - // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case - yield* pages; - } - } - else { - yield* firstVal.value; - for await (const page of pages) { - // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. 
In this branch, - // it must be the case that `TPage = TElement[]` - yield* page; + yield __await(yield* __asyncDelegator(__asyncValues(firstVal.value))); + try { + for (var _h = true, pages_2 = __asyncValues(pages), pages_2_1; pages_2_1 = yield __await(pages_2.next()), _d = pages_2_1.done, !_d; _h = true) { + _f = pages_2_1.value; + _h = false; + const page = _f; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield __await(yield* __asyncDelegator(__asyncValues(page))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_h && !_d && (_e = pages_2.return)) yield __await(_e.call(pages_2)); + } + finally { if (e_2) throw e_2.error; } + } } - } + }); } -async function* getPageAsyncIterator(pagedResult, options = {}) { - const { pageLink, maxPageSize } = options; - let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize); - if (!response) { - return; - } - yield response.page; - while (response.nextPageLink) { - response = await pagedResult.getPage(response.nextPageLink, maxPageSize); +function getPageAsyncIterator(pagedResult, options = {}) { + return __asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield __await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? 
pageLink : pagedResult.firstPageLink, maxPageSize)); if (!response) { - return; + return yield __await(void 0); + } + yield yield __await(response.page); + while (response.nextPageLink) { + response = yield __await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + if (!response) { + return yield __await(void 0); + } + yield yield __await(response.page); } - yield response.page; - } + }); } //# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js index 6e3c89752..04caee657 100644 --- a/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js +++ b/node_modules/@azure/core-paging/dist/commonjs/getPagedAsyncIterator.js @@ -3,6 +3,7 @@ // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", { value: true }); exports.getPagedAsyncIterator = void 0; +const tslib_1 = require("tslib"); /** * returns an async iterator that iterates over results. It also has a `byPage` * method that returns pages of items at once. @@ -11,6 +12,7 @@ exports.getPagedAsyncIterator = void 0; * @returns a paged async iterator that iterates over results. */ function getPagedAsyncIterator(pagedResult) { + var _a; const iter = getItemAsyncIterator(pagedResult); return { next() { @@ -19,58 +21,86 @@ function getPagedAsyncIterator(pagedResult) { [Symbol.asyncIterator]() { return this; }, - byPage: pagedResult?.byPage ?? - ((settings) => { - const { continuationToken, maxPageSize } = settings ?? {}; - return getPageAsyncIterator(pagedResult, { - pageLink: continuationToken, - maxPageSize, - }); - }), + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? 
settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), }; } exports.getPagedAsyncIterator = getPagedAsyncIterator; -async function* getItemAsyncIterator(pagedResult) { - const pages = getPageAsyncIterator(pagedResult); - const firstVal = await pages.next(); - // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is - if (!Array.isArray(firstVal.value)) { - // can extract elements from this page - const { toElements } = pagedResult; - if (toElements) { - yield* toElements(firstVal.value); - for await (const page of pages) { - yield* toElements(page); +function getItemAsyncIterator(pagedResult) { + return tslib_1.__asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var _a, e_1, _b, _c, _d, e_2, _e, _f; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield tslib_1.__await(pages.next()); + // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield tslib_1.__await(yield* tslib_1.__asyncDelegator(tslib_1.__asyncValues(toElements(firstVal.value)))); + try { + for (var _g = true, pages_1 = tslib_1.__asyncValues(pages), pages_1_1; pages_1_1 = yield tslib_1.__await(pages_1.next()), _a = pages_1_1.done, !_a; _g = true) { + _c = pages_1_1.value; + _g = false; + const page = _c; + yield tslib_1.__await(yield* tslib_1.__asyncDelegator(tslib_1.__asyncValues(toElements(page)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_g && !_a && (_b = pages_1.return)) yield tslib_1.__await(_b.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + else { + yield yield tslib_1.__await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield tslib_1.__await(yield* 
tslib_1.__asyncDelegator(tslib_1.__asyncValues(pages))); } } else { - yield firstVal.value; - // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case - yield* pages; - } - } - else { - yield* firstVal.value; - for await (const page of pages) { - // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, - // it must be the case that `TPage = TElement[]` - yield* page; + yield tslib_1.__await(yield* tslib_1.__asyncDelegator(tslib_1.__asyncValues(firstVal.value))); + try { + for (var _h = true, pages_2 = tslib_1.__asyncValues(pages), pages_2_1; pages_2_1 = yield tslib_1.__await(pages_2.next()), _d = pages_2_1.done, !_d; _h = true) { + _f = pages_2_1.value; + _h = false; + const page = _f; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, + // it must be the case that `TPage = TElement[]` + yield tslib_1.__await(yield* tslib_1.__asyncDelegator(tslib_1.__asyncValues(page))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_h && !_d && (_e = pages_2.return)) yield tslib_1.__await(_e.call(pages_2)); + } + finally { if (e_2) throw e_2.error; } + } } - } + }); } -async function* getPageAsyncIterator(pagedResult, options = {}) { - const { pageLink, maxPageSize } = options; - let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize); - if (!response) { - return; - } - yield response.page; - while (response.nextPageLink) { - response = await pagedResult.getPage(response.nextPageLink, maxPageSize); +function getPageAsyncIterator(pagedResult, options = {}) { + return tslib_1.__asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield tslib_1.__await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? 
pageLink : pagedResult.firstPageLink, maxPageSize)); if (!response) { - return; + return yield tslib_1.__await(void 0); + } + yield yield tslib_1.__await(response.page); + while (response.nextPageLink) { + response = yield tslib_1.__await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + if (!response) { + return yield tslib_1.__await(void 0); + } + yield yield tslib_1.__await(response.page); } - yield response.page; - } + }); } //# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json index 22735db17..6305f1798 100644 --- a/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json +++ b/node_modules/@azure/core-paging/dist/commonjs/tsdoc-metadata.json @@ -5,7 +5,7 @@ "toolPackages": [ { "packageName": "@microsoft/api-extractor", - "packageVersion": "7.42.3" + "packageVersion": "7.43.1" } ] } diff --git a/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js index d9db4b998..f21132b34 100644 --- a/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js +++ b/node_modules/@azure/core-paging/dist/esm/getPagedAsyncIterator.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; /** * returns an async iterator that iterates over results. It also has a `byPage` * method that returns pages of items at once. @@ -8,6 +9,7 @@ * @returns a paged async iterator that iterates over results. */ export function getPagedAsyncIterator(pagedResult) { + var _a; const iter = getItemAsyncIterator(pagedResult); return { next() { @@ -16,57 +18,85 @@ export function getPagedAsyncIterator(pagedResult) { [Symbol.asyncIterator]() { return this; }, - byPage: pagedResult?.byPage ?? 
- ((settings) => { - const { continuationToken, maxPageSize } = settings ?? {}; - return getPageAsyncIterator(pagedResult, { - pageLink: continuationToken, - maxPageSize, - }); - }), + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), }; } -async function* getItemAsyncIterator(pagedResult) { - const pages = getPageAsyncIterator(pagedResult); - const firstVal = await pages.next(); - // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is - if (!Array.isArray(firstVal.value)) { - // can extract elements from this page - const { toElements } = pagedResult; - if (toElements) { - yield* toElements(firstVal.value); - for await (const page of pages) { - yield* toElements(page); +function getItemAsyncIterator(pagedResult) { + return __asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var _a, e_1, _b, _c, _d, e_2, _e, _f; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield __await(pages.next()); + // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield __await(yield* __asyncDelegator(__asyncValues(toElements(firstVal.value)))); + try { + for (var _g = true, pages_1 = __asyncValues(pages), pages_1_1; pages_1_1 = yield __await(pages_1.next()), _a = pages_1_1.done, !_a; _g = true) { + _c = pages_1_1.value; + _g = false; + const page = _c; + yield __await(yield* __asyncDelegator(__asyncValues(toElements(page)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_g && !_a && (_b = pages_1.return)) yield __await(_b.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + else { + yield yield __await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield __await(yield* __asyncDelegator(__asyncValues(pages))); } } else { - yield firstVal.value; - // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case - yield* pages; - } - } - else { - yield* firstVal.value; - for await (const page of pages) { - // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, - // it must be the case that `TPage = TElement[]` - yield* page; + yield __await(yield* __asyncDelegator(__asyncValues(firstVal.value))); + try { + for (var _h = true, pages_2 = __asyncValues(pages), pages_2_1; pages_2_1 = yield __await(pages_2.next()), _d = pages_2_1.done, !_d; _h = true) { + _f = pages_2_1.value; + _h = false; + const page = _f; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. 
In this branch, + // it must be the case that `TPage = TElement[]` + yield __await(yield* __asyncDelegator(__asyncValues(page))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_h && !_d && (_e = pages_2.return)) yield __await(_e.call(pages_2)); + } + finally { if (e_2) throw e_2.error; } + } } - } + }); } -async function* getPageAsyncIterator(pagedResult, options = {}) { - const { pageLink, maxPageSize } = options; - let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize); - if (!response) { - return; - } - yield response.page; - while (response.nextPageLink) { - response = await pagedResult.getPage(response.nextPageLink, maxPageSize); +function getPageAsyncIterator(pagedResult, options = {}) { + return __asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield __await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); if (!response) { - return; + return yield __await(void 0); + } + yield yield __await(response.page); + while (response.nextPageLink) { + response = yield __await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + if (!response) { + return yield __await(void 0); + } + yield yield __await(response.page); } - yield response.page; - } + }); } //# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js index d9db4b998..f21132b34 100644 --- a/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js +++ b/node_modules/@azure/core-paging/dist/react-native/getPagedAsyncIterator.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
+import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; /** * returns an async iterator that iterates over results. It also has a `byPage` * method that returns pages of items at once. @@ -8,6 +9,7 @@ * @returns a paged async iterator that iterates over results. */ export function getPagedAsyncIterator(pagedResult) { + var _a; const iter = getItemAsyncIterator(pagedResult); return { next() { @@ -16,57 +18,85 @@ export function getPagedAsyncIterator(pagedResult) { [Symbol.asyncIterator]() { return this; }, - byPage: pagedResult?.byPage ?? - ((settings) => { - const { continuationToken, maxPageSize } = settings ?? {}; - return getPageAsyncIterator(pagedResult, { - pageLink: continuationToken, - maxPageSize, - }); - }), + byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { + const { continuationToken, maxPageSize } = settings !== null && settings !== void 0 ? settings : {}; + return getPageAsyncIterator(pagedResult, { + pageLink: continuationToken, + maxPageSize, + }); + }), }; } -async function* getItemAsyncIterator(pagedResult) { - const pages = getPageAsyncIterator(pagedResult); - const firstVal = await pages.next(); - // if the result does not have an array shape, i.e. TPage = TElement, then we return it as is - if (!Array.isArray(firstVal.value)) { - // can extract elements from this page - const { toElements } = pagedResult; - if (toElements) { - yield* toElements(firstVal.value); - for await (const page of pages) { - yield* toElements(page); +function getItemAsyncIterator(pagedResult) { + return __asyncGenerator(this, arguments, function* getItemAsyncIterator_1() { + var _a, e_1, _b, _c, _d, e_2, _e, _f; + const pages = getPageAsyncIterator(pagedResult); + const firstVal = yield __await(pages.next()); + // if the result does not have an array shape, i.e. 
TPage = TElement, then we return it as is + if (!Array.isArray(firstVal.value)) { + // can extract elements from this page + const { toElements } = pagedResult; + if (toElements) { + yield __await(yield* __asyncDelegator(__asyncValues(toElements(firstVal.value)))); + try { + for (var _g = true, pages_1 = __asyncValues(pages), pages_1_1; pages_1_1 = yield __await(pages_1.next()), _a = pages_1_1.done, !_a; _g = true) { + _c = pages_1_1.value; + _g = false; + const page = _c; + yield __await(yield* __asyncDelegator(__asyncValues(toElements(page)))); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_g && !_a && (_b = pages_1.return)) yield __await(_b.call(pages_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + else { + yield yield __await(firstVal.value); + // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case + yield __await(yield* __asyncDelegator(__asyncValues(pages))); } } else { - yield firstVal.value; - // `pages` is of type `AsyncIterableIterator` but TPage = TElement in this case - yield* pages; - } - } - else { - yield* firstVal.value; - for await (const page of pages) { - // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. In this branch, - // it must be the case that `TPage = TElement[]` - yield* page; + yield __await(yield* __asyncDelegator(__asyncValues(firstVal.value))); + try { + for (var _h = true, pages_2 = __asyncValues(pages), pages_2_1; pages_2_1 = yield __await(pages_2.next()), _d = pages_2_1.done, !_d; _h = true) { + _f = pages_2_1.value; + _h = false; + const page = _f; + // pages is of type `AsyncIterableIterator` so `page` is of type `TPage`. 
In this branch, + // it must be the case that `TPage = TElement[]` + yield __await(yield* __asyncDelegator(__asyncValues(page))); + } + } + catch (e_2_1) { e_2 = { error: e_2_1 }; } + finally { + try { + if (!_h && !_d && (_e = pages_2.return)) yield __await(_e.call(pages_2)); + } + finally { if (e_2) throw e_2.error; } + } } - } + }); } -async function* getPageAsyncIterator(pagedResult, options = {}) { - const { pageLink, maxPageSize } = options; - let response = await pagedResult.getPage(pageLink ?? pagedResult.firstPageLink, maxPageSize); - if (!response) { - return; - } - yield response.page; - while (response.nextPageLink) { - response = await pagedResult.getPage(response.nextPageLink, maxPageSize); +function getPageAsyncIterator(pagedResult, options = {}) { + return __asyncGenerator(this, arguments, function* getPageAsyncIterator_1() { + const { pageLink, maxPageSize } = options; + let response = yield __await(pagedResult.getPage(pageLink !== null && pageLink !== void 0 ? pageLink : pagedResult.firstPageLink, maxPageSize)); if (!response) { - return; + return yield __await(void 0); + } + yield yield __await(response.page); + while (response.nextPageLink) { + response = yield __await(pagedResult.getPage(response.nextPageLink, maxPageSize)); + if (!response) { + return yield __await(void 0); + } + yield yield __await(response.page); } - yield response.page; - } + }); } //# sourceMappingURL=getPagedAsyncIterator.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-paging/package.json b/node_modules/@azure/core-paging/package.json index 3ec4ece1e..d909d7e27 100644 --- a/node_modules/@azure/core-paging/package.json +++ b/node_modules/@azure/core-paging/package.json @@ -2,7 +2,7 @@ "name": "@azure/core-paging", "author": "Microsoft Corporation", "sdk-type": "client", - "version": "1.6.1", + "version": "1.6.2", "description": "Core types for paging async iterable iterators", "type": "module", "main": "./dist/commonjs/index.js", @@ -91,7 +91,7 @@ 
"playwright": "^1.41.2", "prettier": "^3.2.5", "rimraf": "^5.0.5", - "tshy": "^1.11.1", + "tshy": "^1.13.0", "typescript": "~5.3.3", "vitest": "^1.3.1" }, diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/accessTokenCache.js b/node_modules/@azure/core-rest-pipeline/dist/browser/accessTokenCache.js new file mode 100644 index 000000000..ff7dee1a6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/accessTokenCache.js @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +export const DefaultTokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an AccessTokenCache implementation which clears + * the cached AccessToken's after the expiresOnTimestamp has + * passed. + * @internal + */ +export class ExpiringAccessTokenCache { + /** + * Constructs an instance of ExpiringAccessTokenCache with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = DefaultTokenRefreshBufferMs) { + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/constants.js b/node_modules/@azure/core-rest-pipeline/dist/browser/constants.js new file mode 100644 index 000000000..88acfa811 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/constants.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export const SDK_VERSION = "1.16.0"; +export const DEFAULT_RETRY_POLICY_COUNT = 3; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/createPipelineFromOptions.js b/node_modules/@azure/core-rest-pipeline/dist/browser/createPipelineFromOptions.js new file mode 100644 index 000000000..1cc155029 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/createPipelineFromOptions.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logPolicy } from "./policies/logPolicy.js"; +import { createEmptyPipeline } from "./pipeline.js"; +import { redirectPolicy } from "./policies/redirectPolicy.js"; +import { userAgentPolicy } from "./policies/userAgentPolicy.js"; +import { multipartPolicy, multipartPolicyName } from "./policies/multipartPolicy.js"; +import { decompressResponsePolicy } from "./policies/decompressResponsePolicy.js"; +import { defaultRetryPolicy } from "./policies/defaultRetryPolicy.js"; +import { formDataPolicy } from "./policies/formDataPolicy.js"; +import { isNodeLike } from "@azure/core-util"; +import { proxyPolicy } from "./policies/proxyPolicy.js"; +import { setClientRequestIdPolicy } from "./policies/setClientRequestIdPolicy.js"; +import { tlsPolicy } from "./policies/tlsPolicy.js"; +import { tracingPolicy } from "./policies/tracingPolicy.js"; +/** + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. 
+ */ +export function createPipelineFromOptions(options) { + var _a; + const pipeline = createEmptyPipeline(); + if (isNodeLike) { + if (options.tlsOptions) { + pipeline.addPolicy(tlsPolicy(options.tlsOptions)); + } + pipeline.addPolicy(proxyPolicy(options.proxyOptions)); + pipeline.addPolicy(decompressResponsePolicy()); + } + pipeline.addPolicy(formDataPolicy(), { beforePolicies: [multipartPolicyName] }); + pipeline.addPolicy(userAgentPolicy(options.userAgentOptions)); + pipeline.addPolicy(setClientRequestIdPolicy((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). + pipeline.addPolicy(multipartPolicy(), { afterPhase: "Deserialize" }); + pipeline.addPolicy(defaultRetryPolicy(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy(tracingPolicy(options.userAgentOptions), { afterPhase: "Retry" }); + if (isNodeLike) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. + pipeline.addPolicy(redirectPolicy(options.redirectOptions), { afterPhase: "Retry" }); + } + pipeline.addPolicy(logPolicy(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; +} +//# sourceMappingURL=createPipelineFromOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/defaultHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/browser/defaultHttpClient.js new file mode 100644 index 000000000..97d56f1ad --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/defaultHttpClient.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createFetchHttpClient } from "./fetchHttpClient.js"; +/** + * Create the correct HttpClient for the current environment. + */ +export function createDefaultHttpClient() { + return createFetchHttpClient(); +} +//# sourceMappingURL=defaultHttpClient-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/fetchHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/browser/fetchHttpClient.js new file mode 100644 index 000000000..64942e96d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/fetchHttpClient.js @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { RestError } from "./restError.js"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { isNodeReadableStream, isWebReadableStream } from "./util/typeGuards.js"; +/** + * Checks if the body is a Blob or Blob-like + */ +function isBlob(body) { + // File objects count as a type of Blob, so we want to use instanceof explicitly + return (typeof Blob === "function" || typeof Blob === "object") && body instanceof Blob; +} +/** + * A HttpClient implementation that uses window.fetch to send HTTP requests. + * @internal + */ +class FetchHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + try { + return await makeRequest(request); + } + catch (e) { + throw getError(e, request); + } + } +} +/** + * Sends a request + */ +async function makeRequest(request) { + const { abortController, abortControllerCleanup } = setupAbortSignal(request); + try { + const headers = buildFetchHeaders(request.headers); + const { streaming, body: requestBody } = buildRequestBody(request); + const requestInit = Object.assign(Object.assign({ body: requestBody, method: request.method, headers: headers, signal: abortController.signal }, ("credentials" in Request.prototype + ? { credentials: request.withCredentials ? "include" : "same-origin" } + : {})), ("cache" in Request.prototype ? { cache: "no-store" } : {})); + // According to https://fetch.spec.whatwg.org/#fetch-method, + // init.duplex must be set when body is a ReadableStream object. + // currently "half" is the only valid value. + if (streaming) { + requestInit.duplex = "half"; + } + /** + * Developers of the future: + * Do not set redirect: "manual" as part + * of request options. + * It will not work as you expect. + */ + const response = await fetch(request.url, requestInit); + // If we're uploading a blob, we need to fire the progress event manually + if (isBlob(request.body) && request.onUploadProgress) { + request.onUploadProgress({ loadedBytes: request.body.size }); + } + return buildPipelineResponse(response, request, abortControllerCleanup); + } + catch (e) { + abortControllerCleanup === null || abortControllerCleanup === void 0 ? 
void 0 : abortControllerCleanup(); + throw e; + } +} +/** + * Creates a pipeline response from a Fetch response; + */ +async function buildPipelineResponse(httpResponse, request, abortControllerCleanup) { + var _a, _b; + const headers = buildPipelineHeaders(httpResponse); + const response = { + request, + headers, + status: httpResponse.status, + }; + const bodyStream = isWebReadableStream(httpResponse.body) + ? buildBodyStream(httpResponse.body, { + onProgress: request.onDownloadProgress, + onEnd: abortControllerCleanup, + }) + : httpResponse.body; + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(response.status))) { + if (request.enableBrowserStreams) { + response.browserStreamBody = bodyStream !== null && bodyStream !== void 0 ? bodyStream : undefined; + } + else { + const responseStream = new Response(bodyStream); + response.blobBody = responseStream.blob(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? void 0 : abortControllerCleanup(); + } + } + else { + const responseStream = new Response(bodyStream); + response.bodyAsText = await responseStream.text(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? 
void 0 : abortControllerCleanup(); + } + return response; +} +function setupAbortSignal(request) { + const abortController = new AbortController(); + // Cleanup function + let abortControllerCleanup; + /** + * Attach an abort listener to the request + */ + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + abortControllerCleanup = () => { + var _a; + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + } + }; + } + // If a timeout was passed, call the abort signal once the time elapses + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + return { abortController, abortControllerCleanup }; +} +/** + * Gets the specific error + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function getError(e, request) { + var _a; + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + return e; + } + else { + return new RestError(`Error sending request: ${e.message}`, { + code: (_a = e === null || e === void 0 ? void 0 : e.code) !== null && _a !== void 0 ? 
_a : RestError.REQUEST_SEND_ERROR, + request, + }); + } +} +/** + * Converts PipelineRequest headers to Fetch headers + */ +function buildFetchHeaders(pipelineHeaders) { + const headers = new Headers(); + for (const [name, value] of pipelineHeaders) { + headers.append(name, value); + } + return headers; +} +function buildPipelineHeaders(httpResponse) { + const responseHeaders = createHttpHeaders(); + for (const [name, value] of httpResponse.headers) { + responseHeaders.set(name, value); + } + return responseHeaders; +} +function buildRequestBody(request) { + const body = typeof request.body === "function" ? request.body() : request.body; + if (isNodeReadableStream(body)) { + throw new Error("Node streams are not supported in browser environment."); + } + return isWebReadableStream(body) + ? { streaming: true, body: buildBodyStream(body, { onProgress: request.onUploadProgress }) } + : { streaming: false, body }; +} +/** + * Reads the request/response original stream and stream it through a new + * ReadableStream, this is done to be able to report progress in a way that + * all modern browsers support. TransformStreams would be an alternative, + * however they are not yet supported by all browsers i.e Firefox + */ +function buildBodyStream(readableStream, options = {}) { + let loadedBytes = 0; + const { onProgress, onEnd } = options; + // If the current browser supports pipeThrough we use a TransformStream + // to report progress + if (isTransformStreamSupported(readableStream)) { + return readableStream.pipeThrough(new TransformStream({ + transform(chunk, controller) { + if (chunk === null) { + controller.terminate(); + return; + } + controller.enqueue(chunk); + loadedBytes += chunk.length; + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + flush() { + onEnd === null || onEnd === void 0 ? 
void 0 : onEnd(); + }, + })); + } + else { + // If we can't use transform streams, wrap the original stream in a new readable stream + // and use pull to enqueue each chunk and report progress. + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + var _a; + const { done, value } = await reader.read(); + // When no more data needs to be consumed, break the reading + if (done || !value) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + // Close the stream + controller.close(); + reader.releaseLock(); + return; + } + loadedBytes += (_a = value === null || value === void 0 ? void 0 : value.length) !== null && _a !== void 0 ? _a : 0; + // Enqueue the next data chunk into our target stream + controller.enqueue(value); + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + cancel(reason) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + return reader.cancel(reason); + }, + }); + } +} +/** + * Create a new HttpClient instance for the browser environment. + * @internal + */ +export function createFetchHttpClient() { + return new FetchHttpClient(); +} +function isTransformStreamSupported(readableStream) { + return readableStream.pipeThrough !== undefined && self.TransformStream !== undefined; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/httpHeaders.js b/node_modules/@azure/core-rest-pipeline/dist/browser/httpHeaders.js new file mode 100644 index 000000000..c4b7c919f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/httpHeaders.js @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; + } +} +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } + else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +export function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/index.js b/node_modules/@azure/core-rest-pipeline/dist/browser/index.js new file mode 100644 index 000000000..619ce9ca5 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/index.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { createEmptyPipeline, } from "./pipeline.js"; +export { createPipelineFromOptions, } from "./createPipelineFromOptions.js"; +export { createDefaultHttpClient } from "./defaultHttpClient.js"; +export { createHttpHeaders } from "./httpHeaders.js"; +export { createPipelineRequest } from "./pipelineRequest.js"; +export { RestError, isRestError } from "./restError.js"; +export { decompressResponsePolicy, decompressResponsePolicyName, } from "./policies/decompressResponsePolicy.js"; +export { exponentialRetryPolicy, exponentialRetryPolicyName, } from "./policies/exponentialRetryPolicy.js"; +export { setClientRequestIdPolicy, setClientRequestIdPolicyName, } from "./policies/setClientRequestIdPolicy.js"; +export { logPolicy, logPolicyName } from "./policies/logPolicy.js"; +export { multipartPolicy, multipartPolicyName } from "./policies/multipartPolicy.js"; +export { proxyPolicy, proxyPolicyName, getDefaultProxySettings } from "./policies/proxyPolicy.js"; +export { redirectPolicy, redirectPolicyName, } from "./policies/redirectPolicy.js"; +export { systemErrorRetryPolicy, systemErrorRetryPolicyName, } from "./policies/systemErrorRetryPolicy.js"; +export { throttlingRetryPolicy, throttlingRetryPolicyName, } from "./policies/throttlingRetryPolicy.js"; +export { retryPolicy } from "./policies/retryPolicy.js"; +export { tracingPolicy, tracingPolicyName, } from "./policies/tracingPolicy.js"; +export { defaultRetryPolicy, } from "./policies/defaultRetryPolicy.js"; +export { userAgentPolicy, userAgentPolicyName, } from "./policies/userAgentPolicy.js"; +export { tlsPolicy, tlsPolicyName } from "./policies/tlsPolicy.js"; +export { formDataPolicy, formDataPolicyName } from "./policies/formDataPolicy.js"; +export { bearerTokenAuthenticationPolicy, bearerTokenAuthenticationPolicyName, } from "./policies/bearerTokenAuthenticationPolicy.js"; +export { ndJsonPolicy, ndJsonPolicyName } from "./policies/ndJsonPolicy.js"; +export { auxiliaryAuthenticationHeaderPolicy, 
auxiliaryAuthenticationHeaderPolicyName, } from "./policies/auxiliaryAuthenticationHeaderPolicy.js"; +export { createFile, createFileFromStream, } from "./util/file.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/interfaces.js b/node_modules/@azure/core-rest-pipeline/dist/browser/interfaces.js new file mode 100644 index 000000000..c0a2e2e65 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/interfaces.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/log.js b/node_modules/@azure/core-rest-pipeline/dist/browser/log.js new file mode 100644 index 000000000..10a0a4eb6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-rest-pipeline"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/nodeHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/browser/nodeHttpClient.js new file mode 100644 index 000000000..9d49b52a1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/nodeHttpClient.js @@ -0,0 +1,332 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as http from "node:http"; +import * as https from "node:https"; +import * as zlib from "node:zlib"; +import { Transform } from "node:stream"; +import { AbortError } from "@azure/abort-controller"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { RestError } from "./restError.js"; +import { logger } from "./log.js"; +const DEFAULT_TLS_SETTINGS = {}; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise((resolve) => { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; +} +class ReportTransform extends Transform { + // eslint-disable-next-line @typescript-eslint/ban-types + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } +} +/** + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. + * @internal + */ +class NodeHttpClient { + constructor() { + this.cachedHttpsAgents = new WeakMap(); + } + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a, _b, _c; + const abortController = new AbortController(); + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + } + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request, abortController, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, + }; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. 
+ if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; + } + else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } + finally { + // clean up event listener + if (request.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + } + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request, abortController, body) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : RestError.REQUEST_SEND_ERROR, request })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? 
Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + logger.error("Unrecognized body type", body); + reject(new RestError("Unrecognized body type")); + } + } + else { + // streams don't like "undefined" being passed as data + req.end(); + } + }); + } + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. + this.cachedHttpAgent = new http.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } + else { + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. 
+ keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } +} +function getResponseHeaders(res) { + const headers = createHttpHeaders(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } + else if (value) { + headers.set(header, value); + } + } + return headers; +} +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib.createGunzip(); + stream.pipe(unzip); + return unzip; + } + else if (contentEncoding === "deflate") { + const inflate = zlib.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; +} +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + reject(e); + } + else { + reject(new RestError(`Error reading response as text: ${e.message}`, { + code: RestError.PARSE_ERROR, + })); + } + }); + }); +} +/** @internal */ +export function getBodyLength(body) { + if (!body) { + return 0; + } + else if (Buffer.isBuffer(body)) { + return body.length; + } + else if (isReadableStream(body)) { + return null; + } + else if (isArrayBuffer(body)) { + return body.byteLength; + } + else if (typeof body === "string") { + return Buffer.from(body).length; + } + else { + return null; + } +} +/** + * Create a new HttpClient instance for the NodeJS environment. 
+ * @internal + */ +export function createNodeHttpClient() { + return new NodeHttpClient(); +} +//# sourceMappingURL=nodeHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/package.json b/node_modules/@azure/core-rest-pipeline/dist/browser/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/pipeline.js b/node_modules/@azure/core-rest-pipeline/dist/browser/pipeline.js new file mode 100644 index 000000000..07e8ced7f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/pipeline.js @@ -0,0 +1,262 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); +/** + * A private implementation of Pipeline. + * Do not export this class from the package. + * @internal + */ +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? 
_a : []; + this._orderedPolicies = undefined; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; + } + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; + } + clone() { + return new HttpPipeline(this._policies); + } + static create() { + return new HttpPipeline(); + } + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. 
Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. + * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; + } + // Track policies for each phase. + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } + } + // First walk each policy and create a node to track metadata. 
+ for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. 
+ continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. + walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } + } + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } +} +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. 
+ */ +export function createEmptyPipeline() { + return HttpPipeline.create(); +} +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/pipelineRequest.js b/node_modules/@azure/core-rest-pipeline/dist/browser/pipelineRequest.js new file mode 100644 index 000000000..866575912 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/pipelineRequest.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders } from "./httpHeaders.js"; +import { randomUUID } from "@azure/core-util"; +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : createHttpHeaders(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || randomUUID(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } +} +/** + * Creates a new pipeline request with the given options. 
+ * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. + */ +export function createPipelineRequest(options) { + return new PipelineRequestImpl(options); +} +//# sourceMappingURL=pipelineRequest.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/auxiliaryAuthenticationHeaderPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/auxiliaryAuthenticationHeaderPolicy.js new file mode 100644 index 000000000..912154a6a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/auxiliaryAuthenticationHeaderPolicy.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTokenCycler } from "../util/tokenCycler.js"; +import { logger as coreLogger } from "../log.js"; +/** + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. + */ +export const auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; +} +/** + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. 
+ * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works + */ +export function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || coreLogger; + const tokenCyclerMap = new WeakMap(); + return { + name: auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = createTokenCycler(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. 
${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); + } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; +} +//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/bearerTokenAuthenticationPolicy.js new file mode 100644 index 000000000..4ef00eb8d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/bearerTokenAuthenticationPolicy.js @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTokenCycler } from "../util/tokenCycler.js"; +import { logger as coreLogger } from "../log.js"; +/** + * The programmatic identifier of the bearerTokenAuthenticationPolicy. + */ +export const bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. 
+ */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. + */ +export function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || coreLogger; + const callbacks = Object.assign({ authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge }, challengeCallbacks); + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. + const getAccessToken = credential + ? createTokenCycler(credential /* , options */) + : () => Promise.resolve(null); + return { + name: bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. 
+ * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + }); + let response; + let error; + try { + response = await next(request); + } + catch (err) { + error = err; + response = err.response; + } + if (callbacks.authorizeRequestOnChallenge && + (response === null || response === void 0 ? void 0 : response.status) === 401 && + getChallenge(response)) { + // processes challenge + const shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger, + }); + if (shouldSendRequest) { + return next(request); + } + } + if (error) { + throw error; + } + else { + return response; + } + }, + }; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/decompressResponsePolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/decompressResponsePolicy.js new file mode 100644 index 000000000..4965ad030 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/decompressResponsePolicy.js @@ -0,0 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "browser" section in package.json + */ +export const decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * decompressResponsePolicy is not supported in the browser and attempting + * to use it will raise an error. 
+ */ +export function decompressResponsePolicy() { + throw new Error("decompressResponsePolicy is not supported in browser environment"); +} +//# sourceMappingURL=decompressResponsePolicy-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/defaultRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/defaultRetryPolicy.js new file mode 100644 index 000000000..bda83eb35 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/defaultRetryPolicy.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { throttlingRetryStrategy } from "../retryStrategies/throttlingRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link defaultRetryPolicy} + */ +export const defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. + */ +export function defaultRetryPolicy(options = {}) { + var _a; + return { + name: defaultRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy(), exponentialRetryStrategy(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? 
_a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=defaultRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/exponentialRetryPolicy.js new file mode 100644 index 000000000..12193c26b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/exponentialRetryPolicy.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * The programmatic identifier of the exponentialRetryPolicy. + */ +export const exponentialRetryPolicyName = "exponentialRetryPolicy"; +/** + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. + */ +export function exponentialRetryPolicy(options = {}) { + var _a; + return retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }); +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/formDataPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/formDataPolicy.js new file mode 100644 index 000000000..e4816a489 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/formDataPolicy.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isNodeLike, stringToUint8Array } from "@azure/core-util"; +import { createHttpHeaders } from "../httpHeaders.js"; +/** + * The programmatic identifier of the formDataPolicy. + */ +export const formDataPolicyName = "formDataPolicy"; +function formDataToFormDataMap(formData) { + var _a; + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); + formDataMap[key].push(value); + } + return formDataMap; +} +/** + * A policy that encodes FormData on the request into the body. + */ +export function formDataPolicy() { + return { + name: formDataPolicyName, + async sendRequest(request, next) { + if (isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { + request.formData = formDataToFormDataMap(request.body); + request.body = undefined; + } + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); + } + else { + await prepareFormData(request.formData, request); + } + request.formData = undefined; + } + return next(request); + }, + }; +} +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } + else { + urlSearchParams.append(key, value.toString()); + } + } + return urlSearchParams.toString(); +} +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. 
+ return; + } + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: createHttpHeaders({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: stringToUint8Array(value, "utf-8"), + }); + } + else if (value === undefined || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } + else { + // using || instead of ?? here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = createHttpHeaders(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + // again, || is used since an empty value.type means the content type is unset + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value, + }); + } + } + } + request.multipartBody = { parts }; +} +//# sourceMappingURL=formDataPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/logPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/logPolicy.js new file mode 100644 index 000000000..e981567c6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/logPolicy.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger as coreLogger } from "../log.js"; +import { Sanitizer } from "../util/sanitizer.js"; +/** + * The programmatic identifier of the logPolicy. 
+ */ +export const logPolicyName = "logPolicy"; +/** + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. + */ +export function logPolicy(options = {}) { + var _a; + const logger = (_a = options.logger) !== null && _a !== void 0 ? _a : coreLogger.info; + const sanitizer = new Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: logPolicyName, + async sendRequest(request, next) { + if (!logger.enabled) { + return next(request); + } + logger(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/multipartPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/multipartPolicy.js new file mode 100644 index 000000000..efa3fdcf9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/multipartPolicy.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { randomUUID, stringToUint8Array } from "@azure/core-util"; +import { concat } from "../util/concat.js"; +import { isBlob } from "../util/typeGuards.js"; +function generateBoundary() { + return `----AzSDKFormBoundary${randomUUID()}`; +} +function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; + } + return result; +} +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } + else if (isBlob(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? 
undefined : source.size; + } + else { + return undefined; + } +} +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; + } + else { + total += partLength; + } + } + return total; +} +async function buildRequestBody(request, parts, boundary) { + const sources = [ + stringToUint8Array(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + stringToUint8Array("\r\n", "utf-8"), + stringToUint8Array(encodeHeaders(part.headers), "utf-8"), + stringToUint8Array("\r\n", "utf-8"), + part.body, + stringToUint8Array(`\r\n--${boundary}`, "utf-8"), + ]), + stringToUint8Array("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); + } + request.body = await concat(sources); +} +/** + * Name of multipart policy + */ +export const multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + } + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + } +} +/** + * Pipeline policy for multipart requests + */ +export function multipartPolicy() { + return { + name: multipartPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); + } + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? 
_a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); + } + else { + boundary = generateBoundary(); + } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; +} +//# sourceMappingURL=multipartPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/ndJsonPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/ndJsonPolicy.js new file mode 100644 index 000000000..3b1fa6fb9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/ndJsonPolicy.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the ndJsonPolicy. + */ +export const ndJsonPolicyName = "ndJsonPolicy"; +/** + * ndJsonPolicy is a policy used to control keep alive settings for every request. 
+ */ +export function ndJsonPolicy() { + return { + name: ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); + }, + }; +} +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/proxyPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/proxyPolicy.js new file mode 100644 index 000000000..e8461d00d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/proxyPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "browser" section in package.json + */ +export const proxyPolicyName = "proxyPolicy"; +const errorMessage = "proxyPolicy is not supported in browser environment"; +export function getDefaultProxySettings() { + throw new Error(errorMessage); +} +/** + * proxyPolicy is not supported in the browser and attempting + * to use it will raise an error. + */ +export function proxyPolicy() { + throw new Error(errorMessage); +} +/** + * A function to reset the cached agents. + * proxyPolicy is not supported in the browser and attempting + * to use it will raise an error. 
+ * @internal + */ +export function resetCachedProxyAgents() { + throw new Error(errorMessage); +} +//# sourceMappingURL=proxyPolicy-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/redirectPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/redirectPolicy.js new file mode 100644 index 000000000..302a1b8ef --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/redirectPolicy.js @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the redirectPolicy. + */ +export const redirectPolicyName = "redirectPolicy"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +/** + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. 
+ */ +export function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/retryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/retryPolicy.js new file mode 100644 index 000000000..f58396f45 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/retryPolicy.js @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { delay } from "../util/helpers.js"; +import { createClientLogger } from "@azure/logger"; +import { AbortError } from "@azure/abort-controller"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +const retryPolicyLogger = createClientLogger("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. + */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +export function retryPolicy(strategies, options = { maxRetries: DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? 
_b : DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await delay(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. 
+ } + }, + }; +} +//# sourceMappingURL=retryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/setClientRequestIdPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/setClientRequestIdPolicy.js new file mode 100644 index 000000000..46baba4cd --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/setClientRequestIdPolicy.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the setClientRequestIdPolicy. + */ +export const setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. + */ +export function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); + } + return next(request); + }, + }; +} +//# sourceMappingURL=setClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/systemErrorRetryPolicy.js new file mode 100644 index 000000000..f6b608a75 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/systemErrorRetryPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link systemErrorRetryPolicy} + */ +export const systemErrorRetryPolicyName = "systemErrorRetryPolicy"; +/** + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. + */ +export function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: systemErrorRetryPolicyName, + sendRequest: retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/throttlingRetryPolicy.js new file mode 100644 index 000000000..9349e6e07 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/throttlingRetryPolicy.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { throttlingRetryStrategy } from "../retryStrategies/throttlingRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link throttlingRetryPolicy} + */ +export const throttlingRetryPolicyName = "throttlingRetryPolicy"; +/** + * A policy that retries when the server sends a 429 response with a Retry-After header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. + */ +export function throttlingRetryPolicy(options = {}) { + var _a; + return { + name: throttlingRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/tlsPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/tlsPolicy.js new file mode 100644 index 000000000..67ce535c6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/tlsPolicy.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Name of the TLS Policy + */ +export const tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
+ */ +export function tlsPolicy(tlsSettings) { + return { + name: tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + }, + }; +} +//# sourceMappingURL=tlsPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/tracingPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/tracingPolicy.js new file mode 100644 index 000000000..d077e7733 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/tracingPolicy.js @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTracingClient, } from "@azure/core-tracing"; +import { SDK_VERSION } from "../constants.js"; +import { getUserAgentValue } from "../util/userAgent.js"; +import { logger } from "../log.js"; +import { getErrorMessage, isError } from "@azure/core-util"; +import { isRestError } from "../restError.js"; +/** + * The programmatic identifier of the tracingPolicy. + */ +export const tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. + * @param options - Options to configure the telemetry logged by the tracing policy. + */ +export function tracingPolicy(options = {}) { + const userAgent = getUserAgentValue(options.userAgentPrefix); + const tracingClient = tryCreateTracingClient(); + return { + name: tracingPolicyName, + async sendRequest(request, next) { + var _a, _b; + if (!tracingClient || !((_a = request.tracingOptions) === null || _a === void 0 ? 
void 0 : _a.tracingContext)) { + return next(request); + } + const { span, tracingContext } = (_b = tryCreateSpan(tracingClient, request, userAgent)) !== null && _b !== void 0 ? _b : {}; + if (!span || !tracingContext) { + return next(request); + } + try { + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; + } + catch (err) { + tryProcessError(span, err); + throw err; + } + }, + }; +} +function tryCreateTracingClient() { + try { + return createTracingClient({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: SDK_VERSION, + }); + } + catch (e) { + logger.warning(`Error when creating the TracingClient: ${getErrorMessage(e)}`); + return undefined; + } +} +function tryCreateSpan(tracingClient, request, userAgent) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes: { + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + if (userAgent) { + span.setAttribute("http.user_agent", userAgent); + } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); + } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + } + catch (e) { + logger.warning(`Skipping creating a tracing span due to an error: ${getErrorMessage(e)}`); + return undefined; + } +} +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: isError(error) ? 
error : undefined, + }); + if (isRestError(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); + } + span.end(); + } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${getErrorMessage(e)}`); + } +} +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + status: "success", + }); + span.end(); + } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${getErrorMessage(e)}`); + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/policies/userAgentPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/userAgentPolicy.js new file mode 100644 index 000000000..10ae6852c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/policies/userAgentPolicy.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getUserAgentHeaderName, getUserAgentValue } from "../util/userAgent.js"; +const UserAgentHeaderName = getUserAgentHeaderName(); +/** + * The programmatic identifier of the userAgentPolicy. + */ +export const userAgentPolicyName = "userAgentPolicy"; +/** + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. 
+ */ +export function userAgentPolicy(options = {}) { + const userAgentValue = getUserAgentValue(options.userAgentPrefix); + return { + name: userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, userAgentValue); + } + return next(request); + }, + }; +} +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/restError.js b/node_modules/@azure/core-rest-pipeline/dist/browser/restError.js new file mode 100644 index 000000000..9008050e6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/restError.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isError } from "@azure/core-util"; +import { custom } from "./util/inspect.js"; +import { Sanitizer } from "./util/sanitizer.js"; +const errorSanitizer = new Sanitizer(); +/** + * A custom error type for failed pipeline requests. + */ +export class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + this.request = options.request; + this.response = options.response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. 
+ */ +export function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return isError(e) && e.name === "RestError"; +} +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/exponentialRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/exponentialRetryStrategy.js new file mode 100644 index 000000000..c1e5c0d4e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/exponentialRetryStrategy.js @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getRandomIntegerInclusive } from "@azure/core-util"; +import { isThrottlingRetryResponse } from "./throttlingRetryStrategy.js"; +// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +export function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? 
_b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + let retryAfterInMs = retryInterval; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && (isThrottlingRetryResponse(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + // Exponentially increase the delay each time + const exponentialDelay = retryAfterInMs * Math.pow(2, retryCount); + // Don't let the delay exceed the maximum + const clampedExponentialDelay = Math.min(maxRetryInterval, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + retryAfterInMs = + clampedExponentialDelay / 2 + getRandomIntegerInclusive(0, clampedExponentialDelay / 2); + return { retryAfterInMs }; + }, + }; +} +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +export function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +/** + * Determines whether an error from a pipeline response was triggered in the network layer. 
+ */ +export function isSystemError(err) { + if (!err) { + return false; + } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); +} +//# sourceMappingURL=exponentialRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/retryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/retryStrategy.js new file mode 100644 index 000000000..4b2354b00 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/retryStrategy.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=retryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/throttlingRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/throttlingRetryStrategy.js new file mode 100644 index 000000000..5990ccc90 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/retryStrategies/throttlingRetryStrategy.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { parseHeaderValueAsNumber } from "../util/helpers.js"; +/** + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). + */ +const RetryAfterHeader = "Retry-After"; +/** + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. 
+ * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp + */ +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; +/** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. + * If not throttling retry response, returns `undefined`. + * + * @internal + */ +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = parseHeaderValueAsNumber(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds + } + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; + } + catch (e) { + return undefined; + } +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. 
+ */ +export function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +export function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs, + }; + }, + }; +} +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/concat.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/concat.js new file mode 100644 index 000000000..210130788 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/concat.js @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getRawContent } from "./file.js"; +import { isNodeReadableStream, isWebReadableStream } from "./typeGuards.js"; +/** + * Drain the content of the given ReadableStream into a Blob. + * The blob's content may end up in memory or on disk dependent on size. + */ +function drain(stream) { + return new Response(stream).blob(); +} +async function toBlobPart(source) { + if (source instanceof Blob || source instanceof Uint8Array) { + return source; + } + if (isWebReadableStream(source)) { + return drain(source); + } + // If it's not a true Blob, and it's not a Uint8Array, we can assume the source + // is a fake File created by createFileFromStream and we can get the original stream + // using getRawContent. + const rawContent = getRawContent(source); + // Shouldn't happen but guard for it anyway + if (isNodeReadableStream(rawContent)) { + throw new Error("Encountered unexpected type. 
In the browser, `concat` supports Web ReadableStream, Blob, Uint8Array, and files created using `createFile` only."); + } + return toBlobPart(rawContent); +} +/** + * Utility function that concatenates a set of binary inputs into one combined output. + * + * @param sources - array of sources for the concatenation + * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. + * In browser, returns a `Blob` representing all the concatenated inputs. + * + * @internal + */ +export async function concat(sources) { + const parts = []; + for (const source of sources) { + parts.push(await toBlobPart(typeof source === "function" ? source() : source)); + } + return new Blob(parts); +} +//# sourceMappingURL=concat-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/file.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/file.js new file mode 100644 index 000000000..0f271810e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/file.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNodeLike } from "@azure/core-util"; +import { isNodeReadableStream } from "./typeGuards.js"; +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; +/** + * Private symbol used as key on objects created using createFile containing the + * original source of the file object. + * + * This is used in Node to access the original Node stream without using Blob#stream, which + * returns a web stream. 
This is done to avoid a couple of bugs to do with Blob#stream and + * Readable#to/fromWeb in Node versions we support: + * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) + * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) + * + * Once these versions are no longer supported, we may be able to stop doing this. + * + * @internal + */ +const rawContent = Symbol("rawContent"); +function hasRawContent(x) { + return typeof x[rawContent] === "function"; +} +/** + * Extract the raw content from a given blob-like object. If the input was created using createFile + * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. + * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. + * + * @internal + */ +export function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); + } + else { + return blob.stream(); + } +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. + * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. 
+ */ +export function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { + const s = stream(); + if (isNodeReadableStream(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + } + return s; + }, [rawContent]: stream }); +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +export function createFile(content, name, options = {}) { + var _a, _b, _c; + if (isNodeLike) { + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? 
_c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); + } + else { + return new File([content], name, options); + } +} +//# sourceMappingURL=file.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/helpers.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/helpers.js new file mode 100644 index 000000000..a1c1183ce --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/helpers.js @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. + * @returns Resolved promise + */ +export function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? 
void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} +/** + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. + */ +export function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; +} +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/inspect.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/inspect.js new file mode 100644 index 000000000..2fea81927 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/inspect.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const custom = {}; +//# sourceMappingURL=inspect-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/sanitizer.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/sanitizer.js new file mode 100644 index 000000000..4ea4b25b1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/sanitizer.js @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isObject } from "@azure/core-util"; +const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +/** + * @internal + */ +export class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "headers") { + return 
this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; + } + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); + } +} +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/tokenCycler.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/tokenCycler.js new file mode 100644 index 000000000..514c6cb16 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/tokenCycler.js @@ -0,0 +1,149 
@@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { delay } from "./helpers.js"; +// Default options for the cycler if none are provided +export const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3000, // Allow refresh attempts every 3s + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. 
It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +export function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(scopes, getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. 
+ refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + tenantId = undefined; + throw reason; + }); + } + return refreshWorker; + } + return async (scopes, tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. 
+ const mustRefresh = tenantId !== tokenOptions.tenantId || Boolean(tokenOptions.claims) || cycler.mustRefresh; + if (mustRefresh) + return refresh(scopes, tokenOptions); + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); + } + return token; + }; +} +//# sourceMappingURL=tokenCycler.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/typeGuards.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/typeGuards.js new file mode 100644 index 000000000..2baffad6a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/typeGuards.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); +} +export function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +export function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); +} +export function isBlob(x) { + return typeof x.stream === "function"; +} +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/userAgent.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/userAgent.js new file mode 100644 index 000000000..afdddc19b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/userAgent.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getHeaderName, setPlatformSpecificData } from "./userAgentPlatform.js"; +import { SDK_VERSION } from "../constants.js"; +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? 
`${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); +} +/** + * @internal + */ +export function getUserAgentHeaderName() { + return getHeaderName(); +} +/** + * @internal + */ +export function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", SDK_VERSION); + setPlatformSpecificData(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; +} +//# sourceMappingURL=userAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/util/userAgentPlatform.js b/node_modules/@azure/core-rest-pipeline/dist/browser/util/userAgentPlatform.js new file mode 100644 index 000000000..23ac973b9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/util/userAgentPlatform.js @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/* + * NOTE: When moving this file, please update "browser" section in package.json. + */ +/** + * @internal + */ +export function getHeaderName() { + return "x-ms-useragent"; +} +/** + * @internal + */ +export function setPlatformSpecificData(map) { + var _a, _b, _c; + const localNavigator = globalThis.navigator; + map.set("OS", ((_c = (_b = (_a = localNavigator === null || localNavigator === void 0 ? void 0 : localNavigator.userAgentData) === null || _a === void 0 ? void 0 : _a.platform) !== null && _b !== void 0 ? _b : localNavigator === null || localNavigator === void 0 ? void 0 : localNavigator.platform) !== null && _c !== void 0 ? 
_c : "unknown").replace(" ", "")); +} +//# sourceMappingURL=userAgentPlatform-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/browser/xhrHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/browser/xhrHttpClient.js new file mode 100644 index 000000000..925d1d6ba --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/browser/xhrHttpClient.js @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { RestError } from "./restError.js"; +import { isReadableStream } from "./util/typeGuards.js"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + * @internal + */ +class XhrHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + xhr.open(request.method, request.url); + xhr.timeout = 
request.timeout; + xhr.withCredentials = request.withCredentials; + for (const [name, value] of request.headers) { + xhr.setRequestHeader(name, value); + } + xhr.responseType = ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) ? "blob" : "text"; + const body = typeof request.body === "function" ? request.body() : request.body; + if (isReadableStream(body)) { + throw new Error("streams are not supported in XhrHttpClient."); + } + xhr.send(body === undefined ? null : body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a, _b; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? 
void 0 : _b.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + xhr.response + .text() + .then((text) => { + res({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + return; + }) + .catch((e) => { + rej(e); + }); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +function parseHeaders(xhr) { + const responseHeaders = createHttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, { + code: RestError.REQUEST_SEND_ERROR, + request, + }))); + const abortError = new AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +/** + * Create a new HttpClient instance for the browser environment. 
+ * @internal + */ +export function createXhrHttpClient() { + return new XhrHttpClient(); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/accessTokenCache.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/accessTokenCache.js new file mode 100644 index 000000000..be1f82170 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/accessTokenCache.js @@ -0,0 +1,36 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ExpiringAccessTokenCache = exports.DefaultTokenRefreshBufferMs = void 0; +/** + * Defines the default token refresh buffer duration. + */ +exports.DefaultTokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an AccessTokenCache implementation which clears + * the cached AccessToken's after the expiresOnTimestamp has + * passed. + * @internal + */ +class ExpiringAccessTokenCache { + /** + * Constructs an instance of ExpiringAccessTokenCache with + * an optional expiration buffer time. 
+ */ + constructor(tokenRefreshBufferMs = exports.DefaultTokenRefreshBufferMs) { + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +exports.ExpiringAccessTokenCache = ExpiringAccessTokenCache; +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js new file mode 100644 index 000000000..eb8aa4b8b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/constants.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0; +exports.SDK_VERSION = "1.16.0"; +exports.DEFAULT_RETRY_POLICY_COUNT = 3; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js new file mode 100644 index 000000000..04a2dd4d7 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/createPipelineFromOptions.js @@ -0,0 +1,51 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createPipelineFromOptions = void 0; +const logPolicy_js_1 = require("./policies/logPolicy.js"); +const pipeline_js_1 = require("./pipeline.js"); +const redirectPolicy_js_1 = require("./policies/redirectPolicy.js"); +const userAgentPolicy_js_1 = require("./policies/userAgentPolicy.js"); +const multipartPolicy_js_1 = require("./policies/multipartPolicy.js"); +const decompressResponsePolicy_js_1 = require("./policies/decompressResponsePolicy.js"); +const defaultRetryPolicy_js_1 = require("./policies/defaultRetryPolicy.js"); +const formDataPolicy_js_1 = require("./policies/formDataPolicy.js"); +const core_util_1 = require("@azure/core-util"); +const proxyPolicy_js_1 = require("./policies/proxyPolicy.js"); +const setClientRequestIdPolicy_js_1 = require("./policies/setClientRequestIdPolicy.js"); +const tlsPolicy_js_1 = require("./policies/tlsPolicy.js"); +const tracingPolicy_js_1 = require("./policies/tracingPolicy.js"); +/** + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. + */ +function createPipelineFromOptions(options) { + var _a; + const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); + if (core_util_1.isNodeLike) { + if (options.tlsOptions) { + pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); + } + pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); + pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); + } + pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); + pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); + pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)((_a = options.telemetryOptions) === null || _a === void 0 ? 
void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). + pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); + pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)(options.userAgentOptions), { afterPhase: "Retry" }); + if (core_util_1.isNodeLike) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. + pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); + } + pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; +} +exports.createPipelineFromOptions = createPipelineFromOptions; +//# sourceMappingURL=createPipelineFromOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js new file mode 100644 index 000000000..99f767931 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/defaultHttpClient.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createDefaultHttpClient = void 0; +const nodeHttpClient_js_1 = require("./nodeHttpClient.js"); +/** + * Create the correct HttpClient for the current environment. 
+ */ +function createDefaultHttpClient() { + return (0, nodeHttpClient_js_1.createNodeHttpClient)(); +} +exports.createDefaultHttpClient = createDefaultHttpClient; +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/fetchHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/fetchHttpClient.js new file mode 100644 index 000000000..578ff4de2 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/fetchHttpClient.js @@ -0,0 +1,260 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFetchHttpClient = void 0; +const abort_controller_1 = require("@azure/abort-controller"); +const restError_js_1 = require("./restError.js"); +const httpHeaders_js_1 = require("./httpHeaders.js"); +const typeGuards_js_1 = require("./util/typeGuards.js"); +/** + * Checks if the body is a Blob or Blob-like + */ +function isBlob(body) { + // File objects count as a type of Blob, so we want to use instanceof explicitly + return (typeof Blob === "function" || typeof Blob === "object") && body instanceof Blob; +} +/** + * A HttpClient implementation that uses window.fetch to send HTTP requests. + * @internal + */ +class FetchHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + try { + return await makeRequest(request); + } + catch (e) { + throw getError(e, request); + } + } +} +/** + * Sends a request + */ +async function makeRequest(request) { + const { abortController, abortControllerCleanup } = setupAbortSignal(request); + try { + const headers = buildFetchHeaders(request.headers); + const { streaming, body: requestBody } = buildRequestBody(request); + const requestInit = Object.assign(Object.assign({ body: requestBody, method: request.method, headers: headers, signal: abortController.signal }, ("credentials" in Request.prototype + ? { credentials: request.withCredentials ? "include" : "same-origin" } + : {})), ("cache" in Request.prototype ? { cache: "no-store" } : {})); + // According to https://fetch.spec.whatwg.org/#fetch-method, + // init.duplex must be set when body is a ReadableStream object. + // currently "half" is the only valid value. + if (streaming) { + requestInit.duplex = "half"; + } + /** + * Developers of the future: + * Do not set redirect: "manual" as part + * of request options. + * It will not work as you expect. + */ + const response = await fetch(request.url, requestInit); + // If we're uploading a blob, we need to fire the progress event manually + if (isBlob(request.body) && request.onUploadProgress) { + request.onUploadProgress({ loadedBytes: request.body.size }); + } + return buildPipelineResponse(response, request, abortControllerCleanup); + } + catch (e) { + abortControllerCleanup === null || abortControllerCleanup === void 0 ? 
void 0 : abortControllerCleanup(); + throw e; + } +} +/** + * Creates a pipeline response from a Fetch response; + */ +async function buildPipelineResponse(httpResponse, request, abortControllerCleanup) { + var _a, _b; + const headers = buildPipelineHeaders(httpResponse); + const response = { + request, + headers, + status: httpResponse.status, + }; + const bodyStream = (0, typeGuards_js_1.isWebReadableStream)(httpResponse.body) + ? buildBodyStream(httpResponse.body, { + onProgress: request.onDownloadProgress, + onEnd: abortControllerCleanup, + }) + : httpResponse.body; + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(response.status))) { + if (request.enableBrowserStreams) { + response.browserStreamBody = bodyStream !== null && bodyStream !== void 0 ? bodyStream : undefined; + } + else { + const responseStream = new Response(bodyStream); + response.blobBody = responseStream.blob(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? void 0 : abortControllerCleanup(); + } + } + else { + const responseStream = new Response(bodyStream); + response.bodyAsText = await responseStream.text(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? 
void 0 : abortControllerCleanup(); + } + return response; +} +function setupAbortSignal(request) { + const abortController = new AbortController(); + // Cleanup function + let abortControllerCleanup; + /** + * Attach an abort listener to the request + */ + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new abort_controller_1.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + abortControllerCleanup = () => { + var _a; + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + } + }; + } + // If a timeout was passed, call the abort signal once the time elapses + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + return { abortController, abortControllerCleanup }; +} +/** + * Gets the specific error + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function getError(e, request) { + var _a; + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + return e; + } + else { + return new restError_js_1.RestError(`Error sending request: ${e.message}`, { + code: (_a = e === null || e === void 0 ? void 0 : e.code) !== null && _a !== void 0 ? 
_a : restError_js_1.RestError.REQUEST_SEND_ERROR, + request, + }); + } +} +/** + * Converts PipelineRequest headers to Fetch headers + */ +function buildFetchHeaders(pipelineHeaders) { + const headers = new Headers(); + for (const [name, value] of pipelineHeaders) { + headers.append(name, value); + } + return headers; +} +function buildPipelineHeaders(httpResponse) { + const responseHeaders = (0, httpHeaders_js_1.createHttpHeaders)(); + for (const [name, value] of httpResponse.headers) { + responseHeaders.set(name, value); + } + return responseHeaders; +} +function buildRequestBody(request) { + const body = typeof request.body === "function" ? request.body() : request.body; + if ((0, typeGuards_js_1.isNodeReadableStream)(body)) { + throw new Error("Node streams are not supported in browser environment."); + } + return (0, typeGuards_js_1.isWebReadableStream)(body) + ? { streaming: true, body: buildBodyStream(body, { onProgress: request.onUploadProgress }) } + : { streaming: false, body }; +} +/** + * Reads the request/response original stream and stream it through a new + * ReadableStream, this is done to be able to report progress in a way that + * all modern browsers support. TransformStreams would be an alternative, + * however they are not yet supported by all browsers i.e Firefox + */ +function buildBodyStream(readableStream, options = {}) { + let loadedBytes = 0; + const { onProgress, onEnd } = options; + // If the current browser supports pipeThrough we use a TransformStream + // to report progress + if (isTransformStreamSupported(readableStream)) { + return readableStream.pipeThrough(new TransformStream({ + transform(chunk, controller) { + if (chunk === null) { + controller.terminate(); + return; + } + controller.enqueue(chunk); + loadedBytes += chunk.length; + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + flush() { + onEnd === null || onEnd === void 0 ? 
void 0 : onEnd(); + }, + })); + } + else { + // If we can't use transform streams, wrap the original stream in a new readable stream + // and use pull to enqueue each chunk and report progress. + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + var _a; + const { done, value } = await reader.read(); + // When no more data needs to be consumed, break the reading + if (done || !value) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + // Close the stream + controller.close(); + reader.releaseLock(); + return; + } + loadedBytes += (_a = value === null || value === void 0 ? void 0 : value.length) !== null && _a !== void 0 ? _a : 0; + // Enqueue the next data chunk into our target stream + controller.enqueue(value); + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + cancel(reason) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + return reader.cancel(reason); + }, + }); + } +} +/** + * Create a new HttpClient instance for the browser environment. + * @internal + */ +function createFetchHttpClient() { + return new FetchHttpClient(); +} +exports.createFetchHttpClient = createFetchHttpClient; +function isTransformStreamSupported(readableStream) { + return readableStream.pipeThrough !== undefined && self.TransformStream !== undefined; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js new file mode 100644 index 000000000..c86e0b72e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/httpHeaders.js @@ -0,0 +1,93 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createHttpHeaders = void 0; +function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; + } +} +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } + else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} +exports.createHttpHeaders = createHttpHeaders; +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js new file mode 100644 index 000000000..05f192d64 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/index.js @@ -0,0 +1,75 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFileFromStream = exports.createFile = exports.auxiliaryAuthenticationHeaderPolicyName = exports.auxiliaryAuthenticationHeaderPolicy = exports.ndJsonPolicyName = exports.ndJsonPolicy = exports.bearerTokenAuthenticationPolicyName = exports.bearerTokenAuthenticationPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.userAgentPolicyName = exports.userAgentPolicy = exports.defaultRetryPolicy = exports.tracingPolicyName = exports.tracingPolicy = exports.retryPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.setClientRequestIdPolicyName = exports.setClientRequestIdPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.isRestError = exports.RestError = exports.createPipelineRequest = exports.createHttpHeaders = exports.createDefaultHttpClient = exports.createPipelineFromOptions = exports.createEmptyPipeline = void 0; +var pipeline_js_1 = require("./pipeline.js"); +Object.defineProperty(exports, "createEmptyPipeline", { enumerable: true, get: function () { return pipeline_js_1.createEmptyPipeline; } }); +var createPipelineFromOptions_js_1 = require("./createPipelineFromOptions.js"); +Object.defineProperty(exports, "createPipelineFromOptions", { enumerable: true, get: function () { return createPipelineFromOptions_js_1.createPipelineFromOptions; } }); +var defaultHttpClient_js_1 = require("./defaultHttpClient.js"); +Object.defineProperty(exports, "createDefaultHttpClient", { enumerable: true, get: 
function () { return defaultHttpClient_js_1.createDefaultHttpClient; } }); +var httpHeaders_js_1 = require("./httpHeaders.js"); +Object.defineProperty(exports, "createHttpHeaders", { enumerable: true, get: function () { return httpHeaders_js_1.createHttpHeaders; } }); +var pipelineRequest_js_1 = require("./pipelineRequest.js"); +Object.defineProperty(exports, "createPipelineRequest", { enumerable: true, get: function () { return pipelineRequest_js_1.createPipelineRequest; } }); +var restError_js_1 = require("./restError.js"); +Object.defineProperty(exports, "RestError", { enumerable: true, get: function () { return restError_js_1.RestError; } }); +Object.defineProperty(exports, "isRestError", { enumerable: true, get: function () { return restError_js_1.isRestError; } }); +var decompressResponsePolicy_js_1 = require("./policies/decompressResponsePolicy.js"); +Object.defineProperty(exports, "decompressResponsePolicy", { enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicy; } }); +Object.defineProperty(exports, "decompressResponsePolicyName", { enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicyName; } }); +var exponentialRetryPolicy_js_1 = require("./policies/exponentialRetryPolicy.js"); +Object.defineProperty(exports, "exponentialRetryPolicy", { enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicy; } }); +Object.defineProperty(exports, "exponentialRetryPolicyName", { enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; } }); +var setClientRequestIdPolicy_js_1 = require("./policies/setClientRequestIdPolicy.js"); +Object.defineProperty(exports, "setClientRequestIdPolicy", { enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; } }); +Object.defineProperty(exports, "setClientRequestIdPolicyName", { enumerable: true, get: function () { return 
setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; } }); +var logPolicy_js_1 = require("./policies/logPolicy.js"); +Object.defineProperty(exports, "logPolicy", { enumerable: true, get: function () { return logPolicy_js_1.logPolicy; } }); +Object.defineProperty(exports, "logPolicyName", { enumerable: true, get: function () { return logPolicy_js_1.logPolicyName; } }); +var multipartPolicy_js_1 = require("./policies/multipartPolicy.js"); +Object.defineProperty(exports, "multipartPolicy", { enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicy; } }); +Object.defineProperty(exports, "multipartPolicyName", { enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicyName; } }); +var proxyPolicy_js_1 = require("./policies/proxyPolicy.js"); +Object.defineProperty(exports, "proxyPolicy", { enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicy; } }); +Object.defineProperty(exports, "proxyPolicyName", { enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicyName; } }); +Object.defineProperty(exports, "getDefaultProxySettings", { enumerable: true, get: function () { return proxyPolicy_js_1.getDefaultProxySettings; } }); +var redirectPolicy_js_1 = require("./policies/redirectPolicy.js"); +Object.defineProperty(exports, "redirectPolicy", { enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicy; } }); +Object.defineProperty(exports, "redirectPolicyName", { enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicyName; } }); +var systemErrorRetryPolicy_js_1 = require("./policies/systemErrorRetryPolicy.js"); +Object.defineProperty(exports, "systemErrorRetryPolicy", { enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; } }); +Object.defineProperty(exports, "systemErrorRetryPolicyName", { enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; } }); +var 
throttlingRetryPolicy_js_1 = require("./policies/throttlingRetryPolicy.js"); +Object.defineProperty(exports, "throttlingRetryPolicy", { enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicy; } }); +Object.defineProperty(exports, "throttlingRetryPolicyName", { enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; } }); +var retryPolicy_js_1 = require("./policies/retryPolicy.js"); +Object.defineProperty(exports, "retryPolicy", { enumerable: true, get: function () { return retryPolicy_js_1.retryPolicy; } }); +var tracingPolicy_js_1 = require("./policies/tracingPolicy.js"); +Object.defineProperty(exports, "tracingPolicy", { enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicy; } }); +Object.defineProperty(exports, "tracingPolicyName", { enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicyName; } }); +var defaultRetryPolicy_js_1 = require("./policies/defaultRetryPolicy.js"); +Object.defineProperty(exports, "defaultRetryPolicy", { enumerable: true, get: function () { return defaultRetryPolicy_js_1.defaultRetryPolicy; } }); +var userAgentPolicy_js_1 = require("./policies/userAgentPolicy.js"); +Object.defineProperty(exports, "userAgentPolicy", { enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicy; } }); +Object.defineProperty(exports, "userAgentPolicyName", { enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicyName; } }); +var tlsPolicy_js_1 = require("./policies/tlsPolicy.js"); +Object.defineProperty(exports, "tlsPolicy", { enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicy; } }); +Object.defineProperty(exports, "tlsPolicyName", { enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicyName; } }); +var formDataPolicy_js_1 = require("./policies/formDataPolicy.js"); +Object.defineProperty(exports, "formDataPolicy", { enumerable: true, get: function () { return 
formDataPolicy_js_1.formDataPolicy; } }); +Object.defineProperty(exports, "formDataPolicyName", { enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicyName; } }); +var bearerTokenAuthenticationPolicy_js_1 = require("./policies/bearerTokenAuthenticationPolicy.js"); +Object.defineProperty(exports, "bearerTokenAuthenticationPolicy", { enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; } }); +Object.defineProperty(exports, "bearerTokenAuthenticationPolicyName", { enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; } }); +var ndJsonPolicy_js_1 = require("./policies/ndJsonPolicy.js"); +Object.defineProperty(exports, "ndJsonPolicy", { enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicy; } }); +Object.defineProperty(exports, "ndJsonPolicyName", { enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicyName; } }); +var auxiliaryAuthenticationHeaderPolicy_js_1 = require("./policies/auxiliaryAuthenticationHeaderPolicy.js"); +Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicy", { enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; } }); +Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicyName", { enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; } }); +var file_js_1 = require("./util/file.js"); +Object.defineProperty(exports, "createFile", { enumerable: true, get: function () { return file_js_1.createFile; } }); +Object.defineProperty(exports, "createFileFromStream", { enumerable: true, get: function () { return file_js_1.createFileFromStream; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/interfaces.js 
b/node_modules/@azure/core-rest-pipeline/dist/commonjs/interfaces.js new file mode 100644 index 000000000..16f2397b5 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/interfaces.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js new file mode 100644 index 000000000..9bd2b97e6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/log.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.logger = void 0; +const logger_1 = require("@azure/logger"); +exports.logger = (0, logger_1.createClientLogger)("core-rest-pipeline"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/nodeHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/nodeHttpClient.js new file mode 100644 index 000000000..9850d26fc --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/nodeHttpClient.js @@ -0,0 +1,338 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createNodeHttpClient = exports.getBodyLength = void 0; +const tslib_1 = require("tslib"); +const http = tslib_1.__importStar(require("node:http")); +const https = tslib_1.__importStar(require("node:https")); +const zlib = tslib_1.__importStar(require("node:zlib")); +const node_stream_1 = require("node:stream"); +const abort_controller_1 = require("@azure/abort-controller"); +const httpHeaders_js_1 = require("./httpHeaders.js"); +const restError_js_1 = require("./restError.js"); +const log_js_1 = require("./log.js"); +const DEFAULT_TLS_SETTINGS = {}; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise((resolve) => { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; +} +class ReportTransform extends node_stream_1.Transform { + // eslint-disable-next-line @typescript-eslint/ban-types + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } +} +/** + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. + * @internal + */ +class NodeHttpClient { + constructor() { + this.cachedHttpsAgents = new WeakMap(); + } + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a, _b, _c; + const abortController = new AbortController(); + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new abort_controller_1.AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + } + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request, abortController, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, + }; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. 
+ if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + log_js_1.logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; + } + else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } + finally { + // clean up event listener + if (request.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + } + }) + .catch((e) => { + log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request, abortController, body) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new restError_js_1.RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : restError_js_1.RestError.REQUEST_SEND_ERROR, request })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? 
Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + log_js_1.logger.error("Unrecognized body type", body); + reject(new restError_js_1.RestError("Unrecognized body type")); + } + } + else { + // streams don't like "undefined" being passed as data + req.end(); + } + }); + } + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. + this.cachedHttpAgent = new http.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } + else { + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. 
+ keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } +} +function getResponseHeaders(res) { + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } + else if (value) { + headers.set(header, value); + } + } + return headers; +} +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib.createGunzip(); + stream.pipe(unzip); + return unzip; + } + else if (contentEncoding === "deflate") { + const inflate = zlib.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; +} +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + reject(e); + } + else { + reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { + code: restError_js_1.RestError.PARSE_ERROR, + })); + } + }); + }); +} +/** @internal */ +function getBodyLength(body) { + if (!body) { + return 0; + } + else if (Buffer.isBuffer(body)) { + return body.length; + } + else if (isReadableStream(body)) { + return null; + } + else if (isArrayBuffer(body)) { + return body.byteLength; + } + else if (typeof body === "string") { + return Buffer.from(body).length; + } + else { + return null; + } +} +exports.getBodyLength = getBodyLength; +/** + * Create a new HttpClient instance for the NodeJS environment. 
+ * @internal + */ +function createNodeHttpClient() { + return new NodeHttpClient(); +} +exports.createNodeHttpClient = createNodeHttpClient; +//# sourceMappingURL=nodeHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/package.json b/node_modules/@azure/core-rest-pipeline/dist/commonjs/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js new file mode 100644 index 000000000..54e8b6564 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipeline.js @@ -0,0 +1,266 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createEmptyPipeline = void 0; +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); +/** + * A private implementation of Pipeline. + * Do not export this class from the package. + * @internal + */ +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? 
_a : []; + this._orderedPolicies = undefined; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; + } + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; + } + clone() { + return new HttpPipeline(this._policies); + } + static create() { + return new HttpPipeline(); + } + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. 
Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. + * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; + } + // Track policies for each phase. + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } + } + // First walk each policy and create a node to track metadata. 
+ for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. 
+ continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. + walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } + } + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } +} +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. 
+ */ +function createEmptyPipeline() { + return HttpPipeline.create(); +} +exports.createEmptyPipeline = createEmptyPipeline; +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js new file mode 100644 index 000000000..3c836b945 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/pipelineRequest.js @@ -0,0 +1,40 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createPipelineRequest = void 0; +const httpHeaders_js_1 = require("./httpHeaders.js"); +const core_util_1 = require("@azure/core-util"); +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : (0, httpHeaders_js_1.createHttpHeaders)(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || (0, core_util_1.randomUUID)(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? 
_f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } +} +/** + * Creates a new pipeline request with the given options. + * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. + */ +function createPipelineRequest(options) { + return new PipelineRequestImpl(options); +} +exports.createPipelineRequest = createPipelineRequest; +//# sourceMappingURL=pipelineRequest.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js new file mode 100644 index 000000000..ab472d193 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/auxiliaryAuthenticationHeaderPolicy.js @@ -0,0 +1,67 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.auxiliaryAuthenticationHeaderPolicy = exports.auxiliaryAuthenticationHeaderPolicyName = void 0; +const tokenCycler_js_1 = require("../util/tokenCycler.js"); +const log_js_1 = require("../log.js"); +/** + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. + */ +exports.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? 
_b : ""; +} +/** + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. + * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works + */ +function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || log_js_1.logger; + const tokenCyclerMap = new WeakMap(); + return { + name: exports.auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${exports.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. 
${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); + } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; +} +exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; +//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js new file mode 100644 index 000000000..d5e57ea73 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/bearerTokenAuthenticationPolicy.js @@ -0,0 +1,112 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.bearerTokenAuthenticationPolicy = exports.bearerTokenAuthenticationPolicyName = void 0; +const tokenCycler_js_1 = require("../util/tokenCycler.js"); +const log_js_1 = require("../log.js"); +/** + * The programmatic identifier of the bearerTokenAuthenticationPolicy. + */ +exports.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. 
+ */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. + */ +function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || log_js_1.logger; + const callbacks = Object.assign({ authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge }, challengeCallbacks); + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. + const getAccessToken = credential + ? (0, tokenCycler_js_1.createTokenCycler)(credential /* , options */) + : () => Promise.resolve(null); + return { + name: exports.bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. 
+ * - Send an initial request to receive the challenge if it fails. + * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + }); + let response; + let error; + try { + response = await next(request); + } + catch (err) { + error = err; + response = err.response; + } + if (callbacks.authorizeRequestOnChallenge && + (response === null || response === void 0 ? void 0 : response.status) === 401 && + getChallenge(response)) { + // processes challenge + const shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger, + }); + if (shouldSendRequest) { + return next(request); + } + } + if (error) { + throw error; + } + else { + return response; + } + }, + }; +} +exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js new file mode 100644 index 000000000..695f8dc53 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/decompressResponsePolicy.js @@ -0,0 +1,27 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.decompressResponsePolicy = exports.decompressResponsePolicyName = void 0; +/** + * The programmatic identifier of the decompressResponsePolicy. + */ +exports.decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * A policy to enable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +function decompressResponsePolicy() { + return { + name: exports.decompressResponsePolicyName, + async sendRequest(request, next) { + // HEAD requests have no body + if (request.method !== "HEAD") { + request.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request); + }, + }; +} +exports.decompressResponsePolicy = decompressResponsePolicy; +//# sourceMappingURL=decompressResponsePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js new file mode 100644 index 000000000..c5f992997 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/defaultRetryPolicy.js @@ -0,0 +1,30 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.defaultRetryPolicy = exports.defaultRetryPolicyName = void 0; +const exponentialRetryStrategy_js_1 = require("../retryStrategies/exponentialRetryStrategy.js"); +const throttlingRetryStrategy_js_1 = require("../retryStrategies/throttlingRetryStrategy.js"); +const retryPolicy_js_1 = require("./retryPolicy.js"); +const constants_js_1 = require("../constants.js"); +/** + * Name of the {@link defaultRetryPolicy} + */ +exports.defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. + */ +function defaultRetryPolicy(options = {}) { + var _a; + return { + name: exports.defaultRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +exports.defaultRetryPolicy = defaultRetryPolicy; +//# sourceMappingURL=defaultRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js new file mode 100644 index 000000000..2a486b681 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/exponentialRetryPolicy.js @@ -0,0 +1,26 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.exponentialRetryPolicy = exports.exponentialRetryPolicyName = void 0; +const exponentialRetryStrategy_js_1 = require("../retryStrategies/exponentialRetryStrategy.js"); +const retryPolicy_js_1 = require("./retryPolicy.js"); +const constants_js_1 = require("../constants.js"); +/** + * The programmatic identifier of the exponentialRetryPolicy. + */ +exports.exponentialRetryPolicyName = "exponentialRetryPolicy"; +/** + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. + */ +function exponentialRetryPolicy(options = {}) { + var _a; + return (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }); +} +exports.exponentialRetryPolicy = exponentialRetryPolicy; +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js new file mode 100644 index 000000000..ecb44285e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/formDataPolicy.js @@ -0,0 +1,100 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.formDataPolicy = exports.formDataPolicyName = void 0; +const core_util_1 = require("@azure/core-util"); +const httpHeaders_js_1 = require("../httpHeaders.js"); +/** + * The programmatic identifier of the formDataPolicy. 
+ */ +exports.formDataPolicyName = "formDataPolicy"; +function formDataToFormDataMap(formData) { + var _a; + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); + formDataMap[key].push(value); + } + return formDataMap; +} +/** + * A policy that encodes FormData on the request into the body. + */ +function formDataPolicy() { + return { + name: exports.formDataPolicyName, + async sendRequest(request, next) { + if (core_util_1.isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { + request.formData = formDataToFormDataMap(request.body); + request.body = undefined; + } + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); + } + else { + await prepareFormData(request.formData, request); + } + request.formData = undefined; + } + return next(request); + }, + }; +} +exports.formDataPolicy = formDataPolicy; +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } + else { + urlSearchParams.append(key, value.toString()); + } + } + return urlSearchParams.toString(); +} +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. + return; + } + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? 
contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: (0, httpHeaders_js_1.createHttpHeaders)({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: (0, core_util_1.stringToUint8Array)(value, "utf-8"), + }); + } + else if (value === undefined || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } + else { + // using || instead of ?? here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = (0, httpHeaders_js_1.createHttpHeaders)(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + // again, || is used since an empty value.type means the content type is unset + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value, + }); + } + } + } + request.multipartBody = { parts }; +} +//# sourceMappingURL=formDataPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js new file mode 100644 index 000000000..c7d21e72f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/logPolicy.js @@ -0,0 +1,38 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.logPolicy = exports.logPolicyName = void 0; +const log_js_1 = require("../log.js"); +const sanitizer_js_1 = require("../util/sanitizer.js"); +/** + * The programmatic identifier of the logPolicy. + */ +exports.logPolicyName = "logPolicy"; +/** + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. + */ +function logPolicy(options = {}) { + var _a; + const logger = (_a = options.logger) !== null && _a !== void 0 ? _a : log_js_1.logger.info; + const sanitizer = new sanitizer_js_1.Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: exports.logPolicyName, + async sendRequest(request, next) { + if (!logger.enabled) { + return next(request); + } + logger(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; +} +exports.logPolicy = logPolicy; +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js new file mode 100644 index 000000000..ff071b568 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/multipartPolicy.js @@ -0,0 +1,115 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.multipartPolicy = exports.multipartPolicyName = void 0; +const core_util_1 = require("@azure/core-util"); +const concat_js_1 = require("../util/concat.js"); +const typeGuards_js_1 = require("../util/typeGuards.js"); +function generateBoundary() { + return `----AzSDKFormBoundary${(0, core_util_1.randomUUID)()}`; +} +function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; + } + return result; +} +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } + else if ((0, typeGuards_js_1.isBlob)(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? undefined : source.size; + } + else { + return undefined; + } +} +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; + } + else { + total += partLength; + } + } + return total; +} +async function buildRequestBody(request, parts, boundary) { + const sources = [ + (0, core_util_1.stringToUint8Array)(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), + (0, core_util_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), + (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), + part.body, + (0, core_util_1.stringToUint8Array)(`\r\n--${boundary}`, "utf-8"), + ]), + (0, core_util_1.stringToUint8Array)("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); + } + request.body = await (0, concat_js_1.concat)(sources); +} +/** + * Name of multipart policy + */ +exports.multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new 
Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + } + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + } +} +/** + * Pipeline policy for multipart requests + */ +function multipartPolicy() { + return { + name: exports.multipartPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); + } + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? _a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary !== null && boundary !== void 0 ? 
boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); + } + else { + boundary = generateBoundary(); + } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; +} +exports.multipartPolicy = multipartPolicy; +//# sourceMappingURL=multipartPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js new file mode 100644 index 000000000..8fa915168 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/ndJsonPolicy.js @@ -0,0 +1,29 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ndJsonPolicy = exports.ndJsonPolicyName = void 0; +/** + * The programmatic identifier of the ndJsonPolicy. + */ +exports.ndJsonPolicyName = "ndJsonPolicy"; +/** + * ndJsonPolicy is a policy used to control keep alive settings for every request. 
+ */ +function ndJsonPolicy() { + return { + name: exports.ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); + }, + }; +} +exports.ndJsonPolicy = ndJsonPolicy; +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js new file mode 100644 index 000000000..98a0a0480 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/proxyPolicy.js @@ -0,0 +1,197 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.proxyPolicy = exports.getDefaultProxySettings = exports.loadNoProxy = exports.globalNoProxyList = exports.proxyPolicyName = void 0; +const https_proxy_agent_1 = require("https-proxy-agent"); +const http_proxy_agent_1 = require("http-proxy-agent"); +const log_js_1 = require("../log.js"); +const HTTPS_PROXY = "HTTPS_PROXY"; +const HTTP_PROXY = "HTTP_PROXY"; +const ALL_PROXY = "ALL_PROXY"; +const NO_PROXY = "NO_PROXY"; +/** + * The programmatic identifier of the proxyPolicy. + */ +exports.proxyPolicyName = "proxyPolicy"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +exports.globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. 
*/ +const globalBypassedMap = new Map(); +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = new URL(uri).hostname; + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +exports.loadNoProxy = loadNoProxy; +/** + * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. + * If no argument is given, it attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + * @param proxyUrl - The url of the proxy to use. May contain authentication information. + * @deprecated - Internally this method is no longer necessary when setting proxy information. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const parsedUrl = new URL(proxyUrl); + const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; + return { + host: schema + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password, + }; +} +exports.getDefaultProxySettings = getDefaultProxySettings; +/** + * This method attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + */ +function getDefaultProxySettingsInternal() { + const envProxy = loadEnvironmentProxyValue(); + return envProxy ? 
new URL(envProxy) : undefined; +} +function getUrlFromProxySettings(settings) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(settings.host); + } + catch (_error) { + throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); + } + parsedProxyUrl.port = String(settings.port); + if (settings.username) { + parsedProxyUrl.username = settings.username; + } + if (settings.password) { + parsedProxyUrl.password = settings.password; + } + return parsedProxyUrl; +} +function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { + // Custom Agent should take precedence so if one is present + // we should skip to avoid overwriting it. + if (request.agent) { + return; + } + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (request.tlsSettings) { + log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const headers = request.headers.toJSON(); + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpProxyAgent; + } + else { + if (!cachedAgents.httpsProxyAgent) { + cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpsProxyAgent; + } +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!noProxyListLoaded) { + exports.globalNoProxyList.push(...loadNoProxy()); + } + const defaultProxy = proxySettings + ? 
getUrlFromProxySettings(proxySettings) + : getDefaultProxySettingsInternal(); + const cachedAgents = {}; + return { + name: exports.proxyPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.proxySettings && + defaultProxy && + !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : exports.globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? undefined : globalBypassedMap)) { + setProxyAgentOnRequest(request, cachedAgents, defaultProxy); + } + else if (request.proxySettings) { + setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); + } + return next(request); + }, + }; +} +exports.proxyPolicy = proxyPolicy; +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js new file mode 100644 index 000000000..b8c91520a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/redirectPolicy.js @@ -0,0 +1,56 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.redirectPolicy = exports.redirectPolicyName = void 0; +/** + * The programmatic identifier of the redirectPolicy. + */ +exports.redirectPolicyName = "redirectPolicy"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +/** + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. 
+ */ +function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: exports.redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +exports.redirectPolicy = redirectPolicy; +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js new file mode 100644 index 000000000..72a62a715 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/retryPolicy.js @@ -0,0 +1,110 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryPolicy = void 0; +const helpers_js_1 = require("../util/helpers.js"); +const logger_1 = require("@azure/logger"); +const abort_controller_1 = require("@azure/abort-controller"); +const constants_js_1 = require("../constants.js"); +const retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. + */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new abort_controller_1.AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await (0, helpers_js_1.delay)(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. 
Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. + } + }, + }; +} +exports.retryPolicy = retryPolicy; +//# sourceMappingURL=retryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js new file mode 100644 index 000000000..b26d7227c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/setClientRequestIdPolicy.js @@ -0,0 +1,28 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.setClientRequestIdPolicy = exports.setClientRequestIdPolicyName = void 0; +/** + * The programmatic identifier of the setClientRequestIdPolicy. + */ +exports.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. 
+ */ +function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: exports.setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); + } + return next(request); + }, + }; +} +exports.setClientRequestIdPolicy = setClientRequestIdPolicy; +//# sourceMappingURL=setClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js new file mode 100644 index 000000000..fb6238e97 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/systemErrorRetryPolicy.js @@ -0,0 +1,31 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.systemErrorRetryPolicy = exports.systemErrorRetryPolicyName = void 0; +const exponentialRetryStrategy_js_1 = require("../retryStrategies/exponentialRetryStrategy.js"); +const retryPolicy_js_1 = require("./retryPolicy.js"); +const constants_js_1 = require("../constants.js"); +/** + * Name of the {@link systemErrorRetryPolicy} + */ +exports.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; +/** + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. 
+ */ +function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: exports.systemErrorRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([ + (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +exports.systemErrorRetryPolicy = systemErrorRetryPolicy; +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js new file mode 100644 index 000000000..2a20b6467 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/throttlingRetryPolicy.js @@ -0,0 +1,33 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.throttlingRetryPolicy = exports.throttlingRetryPolicyName = void 0; +const throttlingRetryStrategy_js_1 = require("../retryStrategies/throttlingRetryStrategy.js"); +const retryPolicy_js_1 = require("./retryPolicy.js"); +const constants_js_1 = require("../constants.js"); +/** + * Name of the {@link throttlingRetryPolicy} + */ +exports.throttlingRetryPolicyName = "throttlingRetryPolicy"; +/** + * A policy that retries when the server sends a 429 response with a Retry-After header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. 
+ */ +function throttlingRetryPolicy(options = {}) { + var _a; + return { + name: exports.throttlingRetryPolicyName, + sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +exports.throttlingRetryPolicy = throttlingRetryPolicy; +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js new file mode 100644 index 000000000..2ace3ad93 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tlsPolicy.js @@ -0,0 +1,26 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.tlsPolicy = exports.tlsPolicyName = void 0; +/** + * Name of the TLS Policy + */ +exports.tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
+ */ +function tlsPolicy(tlsSettings) { + return { + name: exports.tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + }, + }; +} +exports.tlsPolicy = tlsPolicy; +//# sourceMappingURL=tlsPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js new file mode 100644 index 000000000..698e58d67 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/tracingPolicy.js @@ -0,0 +1,124 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.tracingPolicy = exports.tracingPolicyName = void 0; +const core_tracing_1 = require("@azure/core-tracing"); +const constants_js_1 = require("../constants.js"); +const userAgent_js_1 = require("../util/userAgent.js"); +const log_js_1 = require("../log.js"); +const core_util_1 = require("@azure/core-util"); +const restError_js_1 = require("../restError.js"); +/** + * The programmatic identifier of the tracingPolicy. + */ +exports.tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. + * @param options - Options to configure the telemetry logged by the tracing policy. 
+ */ +function tracingPolicy(options = {}) { + const userAgent = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + const tracingClient = tryCreateTracingClient(); + return { + name: exports.tracingPolicyName, + async sendRequest(request, next) { + var _a, _b; + if (!tracingClient || !((_a = request.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext)) { + return next(request); + } + const { span, tracingContext } = (_b = tryCreateSpan(tracingClient, request, userAgent)) !== null && _b !== void 0 ? _b : {}; + if (!span || !tracingContext) { + return next(request); + } + try { + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; + } + catch (err) { + tryProcessError(span, err); + throw err; + } + }, + }; +} +exports.tracingPolicy = tracingPolicy; +function tryCreateTracingClient() { + try { + return (0, core_tracing_1.createTracingClient)({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: constants_js_1.SDK_VERSION, + }); + } + catch (e) { + log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`); + return undefined; + } +} +function tryCreateSpan(tracingClient, request, userAgent) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes: { + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }, + }); + // If the span is not recording, don't do any more work. 
+ if (!span.isRecording()) { + span.end(); + return undefined; + } + if (userAgent) { + span.setAttribute("http.user_agent", userAgent); + } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); + } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + } + catch (e) { + log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + return undefined; + } +} +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: (0, core_util_1.isError)(error) ? error : undefined, + }); + if ((0, restError_js_1.isRestError)(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); + } + span.end(); + } + catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + } +} +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + status: "success", + }); + span.end(); + } + catch (e) { + log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js new file mode 100644 index 000000000..788f84589 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/policies/userAgentPolicy.js @@ -0,0 +1,30 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.userAgentPolicy = exports.userAgentPolicyName = void 0; +const userAgent_js_1 = require("../util/userAgent.js"); +const UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); +/** + * The programmatic identifier of the userAgentPolicy. + */ +exports.userAgentPolicyName = "userAgentPolicy"; +/** + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. + */ +function userAgentPolicy(options = {}) { + const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); + return { + name: exports.userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, userAgentValue); + } + return next(request); + }, + }; +} +exports.userAgentPolicy = userAgentPolicy; +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js new file mode 100644 index 000000000..7cfe3b789 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/restError.js @@ -0,0 +1,53 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isRestError = exports.RestError = void 0; +const core_util_1 = require("@azure/core-util"); +const inspect_js_1 = require("./util/inspect.js"); +const sanitizer_js_1 = require("./util/sanitizer.js"); +const errorSanitizer = new sanitizer_js_1.Sanitizer(); +/** + * A custom error type for failed pipeline requests. 
+ */ +class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + this.request = options.request; + this.response = options.response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [inspect_js_1.custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +exports.RestError = RestError; +/** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. + */ +function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return (0, core_util_1.isError)(e) && e.name === "RestError"; +} +exports.isRestError = isRestError; +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/exponentialRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/exponentialRetryStrategy.js new file mode 100644 index 000000000..34be3426c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/exponentialRetryStrategy.js @@ -0,0 +1,76 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.isSystemError = exports.isExponentialRetryResponse = exports.exponentialRetryStrategy = void 0; +const core_util_1 = require("@azure/core-util"); +const throttlingRetryStrategy_js_1 = require("./throttlingRetryStrategy.js"); +// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? 
_b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + let retryAfterInMs = retryInterval; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + // Exponentially increase the delay each time + const exponentialDelay = retryAfterInMs * Math.pow(2, retryCount); + // Don't let the delay exceed the maximum + const clampedExponentialDelay = Math.min(maxRetryInterval, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + retryAfterInMs = + clampedExponentialDelay / 2 + (0, core_util_1.getRandomIntegerInclusive)(0, clampedExponentialDelay / 2); + return { retryAfterInMs }; + }, + }; +} +exports.exponentialRetryStrategy = exponentialRetryStrategy; +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +exports.isExponentialRetryResponse = isExponentialRetryResponse; +/** + * Determines whether an error from a pipeline response was triggered in the network layer. 
+ */ +function isSystemError(err) { + if (!err) { + return false; + } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); +} +exports.isSystemError = isSystemError; +//# sourceMappingURL=exponentialRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/retryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/retryStrategy.js new file mode 100644 index 000000000..026e62698 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/retryStrategy.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=retryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/throttlingRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/throttlingRetryStrategy.js new file mode 100644 index 000000000..7403cd392 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/retryStrategies/throttlingRetryStrategy.js @@ -0,0 +1,79 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.throttlingRetryStrategy = exports.isThrottlingRetryResponse = void 0; +const helpers_js_1 = require("../util/helpers.js"); +/** + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). + */ +const RetryAfterHeader = "Retry-After"; +/** + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. 
+ * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp + */ +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; +/** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. + * If not throttling retry response, returns `undefined`. + * + * @internal + */ +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds + } + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; + } + catch (e) { + return undefined; + } +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. 
+ */ +function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +exports.isThrottlingRetryResponse = isThrottlingRetryResponse; +function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs, + }; + }, + }; +} +exports.throttlingRetryStrategy = throttlingRetryStrategy; +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-rest-pipeline/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 000000000..6305f1798 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.43.1" + } + ] +} diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/concat.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/concat.js new file mode 100644 index 000000000..2a0c33a54 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/concat.js @@ -0,0 +1,92 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.concat = void 0; +const tslib_1 = require("tslib"); +const node_stream_1 = require("node:stream"); +const typeGuards_js_1 = require("./typeGuards.js"); +const file_js_1 = require("./file.js"); +function streamAsyncIterator() { + return tslib_1.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = yield tslib_1.__await(reader.read()); + if (done) { + return yield tslib_1.__await(void 0); + } + yield yield tslib_1.__await(value); + } + } + finally { + reader.releaseLock(); + } + }); +} +function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); + } + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); + } +} +function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return node_stream_1.Readable.fromWeb(stream); + } + else { + return stream; + } +} +function toStream(source) { + if (source instanceof Uint8Array) { + return node_stream_1.Readable.from(Buffer.from(source)); + } + else if ((0, typeGuards_js_1.isBlob)(source)) { + return toStream((0, file_js_1.getRawContent)(source)); + } + else { + return ensureNodeStream(source); + } +} +/** + * Utility function that concatenates a set of binary inputs into one combined output. + * + * @param sources - array of sources for the concatenation + * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. + * In browser, returns a `Blob` representing all the concatenated inputs. + * + * @internal + */ +async function concat(sources) { + return function () { + const streams = sources.map((x) => (typeof x === "function" ? 
x() : x)).map(toStream); + return node_stream_1.Readable.from((function () { + return tslib_1.__asyncGenerator(this, arguments, function* () { + var _a, e_1, _b, _c; + for (const stream of streams) { + try { + for (var _d = true, stream_1 = (e_1 = void 0, tslib_1.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib_1.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { + _c = stream_1_1.value; + _d = false; + const chunk = _c; + yield yield tslib_1.__await(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = stream_1.return)) yield tslib_1.__await(_b.call(stream_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); + })()); + }; +} +exports.concat = concat; +//# sourceMappingURL=concat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js new file mode 100644 index 000000000..d89821acb --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/file.js @@ -0,0 +1,102 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createFile = exports.createFileFromStream = exports.getRawContent = void 0; +const core_util_1 = require("@azure/core-util"); +const typeGuards_js_1 = require("./typeGuards.js"); +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; +/** + * Private symbol used as key on objects created using createFile containing the + * original source of the file object. + * + * This is used in Node to access the original Node stream without using Blob#stream, which + * returns a web stream. 
This is done to avoid a couple of bugs to do with Blob#stream and + * Readable#to/fromWeb in Node versions we support: + * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) + * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) + * + * Once these versions are no longer supported, we may be able to stop doing this. + * + * @internal + */ +const rawContent = Symbol("rawContent"); +function hasRawContent(x) { + return typeof x[rawContent] === "function"; +} +/** + * Extract the raw content from a given blob-like object. If the input was created using createFile + * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. + * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. + * + * @internal + */ +function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); + } + else { + return blob.stream(); + } +} +exports.getRawContent = getRawContent; +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. + * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. 
+ */ +function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { + const s = stream(); + if ((0, typeGuards_js_1.isNodeReadableStream)(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + } + return s; + }, [rawContent]: stream }); +} +exports.createFileFromStream = createFileFromStream; +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +function createFile(content, name, options = {}) { + var _a, _b, _c; + if (core_util_1.isNodeLike) { + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? 
_c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); + } + else { + return new File([content], name, options); + } +} +exports.createFile = createFile; +//# sourceMappingURL=file.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/helpers.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/helpers.js new file mode 100644 index 000000000..6a9ff176a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/helpers.js @@ -0,0 +1,63 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseHeaderValueAsNumber = exports.delay = void 0; +const abort_controller_1 = require("@azure/abort-controller"); +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. + * @returns Resolved promise + */ +function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new abort_controller_1.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? 
void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} +exports.delay = delay; +/** + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. + */ +function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; +} +exports.parseHeaderValueAsNumber = parseHeaderValueAsNumber; +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/inspect.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/inspect.js new file mode 100644 index 000000000..7ec55facc --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/inspect.js @@ -0,0 +1,8 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.custom = void 0; +const node_util_1 = require("node:util"); +exports.custom = node_util_1.inspect.custom; +//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/sanitizer.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/sanitizer.js new file mode 100644 index 000000000..9979c4084 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/sanitizer.js @@ -0,0 +1,143 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Sanitizer = void 0; +const core_util_1 = require("@azure/core-util"); +const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +/** + * @internal + */ +class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], 
additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "headers") { + return this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. 
+ return undefined; + } + else if (Array.isArray(value) || (0, core_util_1.isObject)(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; + } + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); + } +} +exports.Sanitizer = Sanitizer; +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js new file mode 100644 index 000000000..c7fb0a32b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/tokenCycler.js @@ -0,0 +1,153 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTokenCycler = exports.DEFAULT_CYCLER_OPTIONS = void 0; +const helpers_js_1 = require("./helpers.js"); +// Default options for the cycler if none are provided +exports.DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3000, // Allow refresh attempts every 3s + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await (0, helpers_js_1.delay)(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. 
+ * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = Object.assign(Object.assign({}, exports.DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(scopes, getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + tenantId = undefined; + throw reason; + }); + } + return refreshWorker; + } + return async (scopes, tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. 
+ const mustRefresh = tenantId !== tokenOptions.tenantId || Boolean(tokenOptions.claims) || cycler.mustRefresh; + if (mustRefresh) + return refresh(scopes, tokenOptions); + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); + } + return token; + }; +} +exports.createTokenCycler = createTokenCycler; +//# sourceMappingURL=tokenCycler.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/typeGuards.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/typeGuards.js new file mode 100644 index 000000000..44d1f4c56 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/typeGuards.js @@ -0,0 +1,24 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isBlob = exports.isReadableStream = exports.isWebReadableStream = exports.isNodeReadableStream = void 0; +function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); +} +exports.isNodeReadableStream = isNodeReadableStream; +function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +exports.isWebReadableStream = isWebReadableStream; +function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); +} +exports.isReadableStream = isReadableStream; +function isBlob(x) { + return typeof x.stream === "function"; +} +exports.isBlob = isBlob; +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js new file mode 100644 index 000000000..0c06037d1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgent.js @@ -0,0 +1,35 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getUserAgentValue = exports.getUserAgentHeaderName = void 0; +const userAgentPlatform_js_1 = require("./userAgentPlatform.js"); +const constants_js_1 = require("../constants.js"); +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? `${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); +} +/** + * @internal + */ +function getUserAgentHeaderName() { + return (0, userAgentPlatform_js_1.getHeaderName)(); +} +exports.getUserAgentHeaderName = getUserAgentHeaderName; +/** + * @internal + */ +function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION); + (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; +} +exports.getUserAgentValue = getUserAgentValue; +//# sourceMappingURL=userAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js new file mode 100644 index 000000000..66b626573 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/util/userAgentPlatform.js @@ -0,0 +1,33 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.setPlatformSpecificData = exports.getHeaderName = void 0; +const tslib_1 = require("tslib"); +const os = tslib_1.__importStar(require("node:os")); +const process = tslib_1.__importStar(require("node:process")); +/** + * @internal + */ +function getHeaderName() { + return "User-Agent"; +} +exports.getHeaderName = getHeaderName; +/** + * @internal + */ +function setPlatformSpecificData(map) { + const versions = process.versions; + if (versions.bun) { + map.set("Bun", versions.bun); + } + else if (versions.deno) { + map.set("Deno", versions.deno); + } + else if (versions.node) { + map.set("Node", versions.node); + } + map.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`); +} +exports.setPlatformSpecificData = setPlatformSpecificData; +//# sourceMappingURL=userAgentPlatform.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/commonjs/xhrHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/commonjs/xhrHttpClient.js new file mode 100644 index 000000000..ab8611b3d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/commonjs/xhrHttpClient.js @@ -0,0 +1,171 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createXhrHttpClient = void 0; +const abort_controller_1 = require("@azure/abort-controller"); +const httpHeaders_js_1 = require("./httpHeaders.js"); +const restError_js_1 = require("./restError.js"); +const typeGuards_js_1 = require("./util/typeGuards.js"); +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + * @internal + */ +class XhrHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + throw new abort_controller_1.AbortError("The operation was aborted."); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const [name, value] of request.headers) { + xhr.setRequestHeader(name, value); + } + xhr.responseType = ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) ? "blob" : "text"; + const body = typeof request.body === "function" ? request.body() : request.body; + if ((0, typeGuards_js_1.isReadableStream)(body)) { + throw new Error("streams are not supported in XhrHttpClient."); + } + xhr.send(body === undefined ? 
null : body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a, _b; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + xhr.response + .text() + .then((text) => { + res({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + return; + }) + .catch((e) => { + rej(e); + }); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", 
(rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +function parseHeaders(xhr) { + const responseHeaders = (0, httpHeaders_js_1.createHttpHeaders)(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new restError_js_1.RestError(`Failed to send request to ${request.url}`, { + code: restError_js_1.RestError.REQUEST_SEND_ERROR, + request, + }))); + const abortError = new abort_controller_1.AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +/** + * Create a new HttpClient instance for the browser environment. + * @internal + */ +function createXhrHttpClient() { + return new XhrHttpClient(); +} +exports.createXhrHttpClient = createXhrHttpClient; +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/accessTokenCache.js b/node_modules/@azure/core-rest-pipeline/dist/esm/accessTokenCache.js new file mode 100644 index 000000000..ff7dee1a6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/accessTokenCache.js @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +export const DefaultTokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an AccessTokenCache implementation which clears + * the cached AccessToken's after the expiresOnTimestamp has + * passed. 
+ * @internal + */ +export class ExpiringAccessTokenCache { + /** + * Constructs an instance of ExpiringAccessTokenCache with + * an optional expiration buffer time. + */ + constructor(tokenRefreshBufferMs = DefaultTokenRefreshBufferMs) { + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/constants.js b/node_modules/@azure/core-rest-pipeline/dist/esm/constants.js new file mode 100644 index 000000000..88acfa811 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/constants.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const SDK_VERSION = "1.16.0"; +export const DEFAULT_RETRY_POLICY_COUNT = 3; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/createPipelineFromOptions.js b/node_modules/@azure/core-rest-pipeline/dist/esm/createPipelineFromOptions.js new file mode 100644 index 000000000..1cc155029 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/createPipelineFromOptions.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { logPolicy } from "./policies/logPolicy.js"; +import { createEmptyPipeline } from "./pipeline.js"; +import { redirectPolicy } from "./policies/redirectPolicy.js"; +import { userAgentPolicy } from "./policies/userAgentPolicy.js"; +import { multipartPolicy, multipartPolicyName } from "./policies/multipartPolicy.js"; +import { decompressResponsePolicy } from "./policies/decompressResponsePolicy.js"; +import { defaultRetryPolicy } from "./policies/defaultRetryPolicy.js"; +import { formDataPolicy } from "./policies/formDataPolicy.js"; +import { isNodeLike } from "@azure/core-util"; +import { proxyPolicy } from "./policies/proxyPolicy.js"; +import { setClientRequestIdPolicy } from "./policies/setClientRequestIdPolicy.js"; +import { tlsPolicy } from "./policies/tlsPolicy.js"; +import { tracingPolicy } from "./policies/tracingPolicy.js"; +/** + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. + */ +export function createPipelineFromOptions(options) { + var _a; + const pipeline = createEmptyPipeline(); + if (isNodeLike) { + if (options.tlsOptions) { + pipeline.addPolicy(tlsPolicy(options.tlsOptions)); + } + pipeline.addPolicy(proxyPolicy(options.proxyOptions)); + pipeline.addPolicy(decompressResponsePolicy()); + } + pipeline.addPolicy(formDataPolicy(), { beforePolicies: [multipartPolicyName] }); + pipeline.addPolicy(userAgentPolicy(options.userAgentOptions)); + pipeline.addPolicy(setClientRequestIdPolicy((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). 
+ pipeline.addPolicy(multipartPolicy(), { afterPhase: "Deserialize" }); + pipeline.addPolicy(defaultRetryPolicy(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy(tracingPolicy(options.userAgentOptions), { afterPhase: "Retry" }); + if (isNodeLike) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. + pipeline.addPolicy(redirectPolicy(options.redirectOptions), { afterPhase: "Retry" }); + } + pipeline.addPolicy(logPolicy(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; +} +//# sourceMappingURL=createPipelineFromOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/defaultHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/esm/defaultHttpClient.js new file mode 100644 index 000000000..342cf8678 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/defaultHttpClient.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createNodeHttpClient } from "./nodeHttpClient.js"; +/** + * Create the correct HttpClient for the current environment. + */ +export function createDefaultHttpClient() { + return createNodeHttpClient(); +} +//# sourceMappingURL=defaultHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/fetchHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/esm/fetchHttpClient.js new file mode 100644 index 000000000..64942e96d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/fetchHttpClient.js @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AbortError } from "@azure/abort-controller"; +import { RestError } from "./restError.js"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { isNodeReadableStream, isWebReadableStream } from "./util/typeGuards.js"; +/** + * Checks if the body is a Blob or Blob-like + */ +function isBlob(body) { + // File objects count as a type of Blob, so we want to use instanceof explicitly + return (typeof Blob === "function" || typeof Blob === "object") && body instanceof Blob; +} +/** + * A HttpClient implementation that uses window.fetch to send HTTP requests. + * @internal + */ +class FetchHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request) { + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + try { + return await makeRequest(request); + } + catch (e) { + throw getError(e, request); + } + } +} +/** + * Sends a request + */ +async function makeRequest(request) { + const { abortController, abortControllerCleanup } = setupAbortSignal(request); + try { + const headers = buildFetchHeaders(request.headers); + const { streaming, body: requestBody } = buildRequestBody(request); + const requestInit = Object.assign(Object.assign({ body: requestBody, method: request.method, headers: headers, signal: abortController.signal }, ("credentials" in Request.prototype + ? { credentials: request.withCredentials ? "include" : "same-origin" } + : {})), ("cache" in Request.prototype ? { cache: "no-store" } : {})); + // According to https://fetch.spec.whatwg.org/#fetch-method, + // init.duplex must be set when body is a ReadableStream object. 
+ // currently "half" is the only valid value. + if (streaming) { + requestInit.duplex = "half"; + } + /** + * Developers of the future: + * Do not set redirect: "manual" as part + * of request options. + * It will not work as you expect. + */ + const response = await fetch(request.url, requestInit); + // If we're uploading a blob, we need to fire the progress event manually + if (isBlob(request.body) && request.onUploadProgress) { + request.onUploadProgress({ loadedBytes: request.body.size }); + } + return buildPipelineResponse(response, request, abortControllerCleanup); + } + catch (e) { + abortControllerCleanup === null || abortControllerCleanup === void 0 ? void 0 : abortControllerCleanup(); + throw e; + } +} +/** + * Creates a pipeline response from a Fetch response; + */ +async function buildPipelineResponse(httpResponse, request, abortControllerCleanup) { + var _a, _b; + const headers = buildPipelineHeaders(httpResponse); + const response = { + request, + headers, + status: httpResponse.status, + }; + const bodyStream = isWebReadableStream(httpResponse.body) + ? buildBodyStream(httpResponse.body, { + onProgress: request.onDownloadProgress, + onEnd: abortControllerCleanup, + }) + : httpResponse.body; + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(response.status))) { + if (request.enableBrowserStreams) { + response.browserStreamBody = bodyStream !== null && bodyStream !== void 0 ? bodyStream : undefined; + } + else { + const responseStream = new Response(bodyStream); + response.blobBody = responseStream.blob(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? 
void 0 : abortControllerCleanup(); + } + } + else { + const responseStream = new Response(bodyStream); + response.bodyAsText = await responseStream.text(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? void 0 : abortControllerCleanup(); + } + return response; +} +function setupAbortSignal(request) { + const abortController = new AbortController(); + // Cleanup function + let abortControllerCleanup; + /** + * Attach an abort listener to the request + */ + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + abortControllerCleanup = () => { + var _a; + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + } + }; + } + // If a timeout was passed, call the abort signal once the time elapses + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + return { abortController, abortControllerCleanup }; +} +/** + * Gets the specific error + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function getError(e, request) { + var _a; + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + return e; + } + else { + return new RestError(`Error sending request: ${e.message}`, { + code: (_a = e === null || e === void 0 ? void 0 : e.code) !== null && _a !== void 0 ? 
_a : RestError.REQUEST_SEND_ERROR, + request, + }); + } +} +/** + * Converts PipelineRequest headers to Fetch headers + */ +function buildFetchHeaders(pipelineHeaders) { + const headers = new Headers(); + for (const [name, value] of pipelineHeaders) { + headers.append(name, value); + } + return headers; +} +function buildPipelineHeaders(httpResponse) { + const responseHeaders = createHttpHeaders(); + for (const [name, value] of httpResponse.headers) { + responseHeaders.set(name, value); + } + return responseHeaders; +} +function buildRequestBody(request) { + const body = typeof request.body === "function" ? request.body() : request.body; + if (isNodeReadableStream(body)) { + throw new Error("Node streams are not supported in browser environment."); + } + return isWebReadableStream(body) + ? { streaming: true, body: buildBodyStream(body, { onProgress: request.onUploadProgress }) } + : { streaming: false, body }; +} +/** + * Reads the request/response original stream and stream it through a new + * ReadableStream, this is done to be able to report progress in a way that + * all modern browsers support. TransformStreams would be an alternative, + * however they are not yet supported by all browsers i.e Firefox + */ +function buildBodyStream(readableStream, options = {}) { + let loadedBytes = 0; + const { onProgress, onEnd } = options; + // If the current browser supports pipeThrough we use a TransformStream + // to report progress + if (isTransformStreamSupported(readableStream)) { + return readableStream.pipeThrough(new TransformStream({ + transform(chunk, controller) { + if (chunk === null) { + controller.terminate(); + return; + } + controller.enqueue(chunk); + loadedBytes += chunk.length; + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + flush() { + onEnd === null || onEnd === void 0 ? 
void 0 : onEnd(); + }, + })); + } + else { + // If we can't use transform streams, wrap the original stream in a new readable stream + // and use pull to enqueue each chunk and report progress. + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + var _a; + const { done, value } = await reader.read(); + // When no more data needs to be consumed, break the reading + if (done || !value) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + // Close the stream + controller.close(); + reader.releaseLock(); + return; + } + loadedBytes += (_a = value === null || value === void 0 ? void 0 : value.length) !== null && _a !== void 0 ? _a : 0; + // Enqueue the next data chunk into our target stream + controller.enqueue(value); + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + cancel(reason) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + return reader.cancel(reason); + }, + }); + } +} +/** + * Create a new HttpClient instance for the browser environment. + * @internal + */ +export function createFetchHttpClient() { + return new FetchHttpClient(); +} +function isTransformStreamSupported(readableStream) { + return readableStream.pipeThrough !== undefined && self.TransformStream !== undefined; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/httpHeaders.js b/node_modules/@azure/core-rest-pipeline/dist/esm/httpHeaders.js new file mode 100644 index 000000000..c4b7c919f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/httpHeaders.js @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; + } +} +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } + else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +export function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/index.js b/node_modules/@azure/core-rest-pipeline/dist/esm/index.js new file mode 100644 index 000000000..619ce9ca5 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/index.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { createEmptyPipeline, } from "./pipeline.js"; +export { createPipelineFromOptions, } from "./createPipelineFromOptions.js"; +export { createDefaultHttpClient } from "./defaultHttpClient.js"; +export { createHttpHeaders } from "./httpHeaders.js"; +export { createPipelineRequest } from "./pipelineRequest.js"; +export { RestError, isRestError } from "./restError.js"; +export { decompressResponsePolicy, decompressResponsePolicyName, } from "./policies/decompressResponsePolicy.js"; +export { exponentialRetryPolicy, exponentialRetryPolicyName, } from "./policies/exponentialRetryPolicy.js"; +export { setClientRequestIdPolicy, setClientRequestIdPolicyName, } from "./policies/setClientRequestIdPolicy.js"; +export { logPolicy, logPolicyName } from "./policies/logPolicy.js"; +export { multipartPolicy, multipartPolicyName } from "./policies/multipartPolicy.js"; +export { proxyPolicy, proxyPolicyName, getDefaultProxySettings } from "./policies/proxyPolicy.js"; +export { redirectPolicy, redirectPolicyName, } from "./policies/redirectPolicy.js"; +export { systemErrorRetryPolicy, systemErrorRetryPolicyName, } from "./policies/systemErrorRetryPolicy.js"; +export { throttlingRetryPolicy, throttlingRetryPolicyName, } from "./policies/throttlingRetryPolicy.js"; +export { retryPolicy } from "./policies/retryPolicy.js"; +export { tracingPolicy, tracingPolicyName, } from "./policies/tracingPolicy.js"; +export { defaultRetryPolicy, } from "./policies/defaultRetryPolicy.js"; +export { userAgentPolicy, userAgentPolicyName, } from "./policies/userAgentPolicy.js"; +export { tlsPolicy, tlsPolicyName } from "./policies/tlsPolicy.js"; +export { formDataPolicy, formDataPolicyName } from "./policies/formDataPolicy.js"; +export { bearerTokenAuthenticationPolicy, bearerTokenAuthenticationPolicyName, } from "./policies/bearerTokenAuthenticationPolicy.js"; +export { ndJsonPolicy, ndJsonPolicyName } from "./policies/ndJsonPolicy.js"; +export { auxiliaryAuthenticationHeaderPolicy, 
auxiliaryAuthenticationHeaderPolicyName, } from "./policies/auxiliaryAuthenticationHeaderPolicy.js"; +export { createFile, createFileFromStream, } from "./util/file.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/interfaces.js b/node_modules/@azure/core-rest-pipeline/dist/esm/interfaces.js new file mode 100644 index 000000000..c0a2e2e65 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/interfaces.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/log.js b/node_modules/@azure/core-rest-pipeline/dist/esm/log.js new file mode 100644 index 000000000..10a0a4eb6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-rest-pipeline"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/nodeHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/esm/nodeHttpClient.js new file mode 100644 index 000000000..9d49b52a1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/nodeHttpClient.js @@ -0,0 +1,332 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as http from "node:http"; +import * as https from "node:https"; +import * as zlib from "node:zlib"; +import { Transform } from "node:stream"; +import { AbortError } from "@azure/abort-controller"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { RestError } from "./restError.js"; +import { logger } from "./log.js"; +const DEFAULT_TLS_SETTINGS = {}; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise((resolve) => { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; +} +class ReportTransform extends Transform { + // eslint-disable-next-line @typescript-eslint/ban-types + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } +} +/** + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. + * @internal + */ +class NodeHttpClient { + constructor() { + this.cachedHttpsAgents = new WeakMap(); + } + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a, _b, _c; + const abortController = new AbortController(); + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + } + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request, abortController, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, + }; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. 
+ if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; + } + else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } + finally { + // clean up event listener + if (request.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + } + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request, abortController, body) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : RestError.REQUEST_SEND_ERROR, request })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? 
Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + logger.error("Unrecognized body type", body); + reject(new RestError("Unrecognized body type")); + } + } + else { + // streams don't like "undefined" being passed as data + req.end(); + } + }); + } + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. + this.cachedHttpAgent = new http.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } + else { + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. 
+ keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } +} +function getResponseHeaders(res) { + const headers = createHttpHeaders(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } + else if (value) { + headers.set(header, value); + } + } + return headers; +} +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib.createGunzip(); + stream.pipe(unzip); + return unzip; + } + else if (contentEncoding === "deflate") { + const inflate = zlib.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; +} +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + reject(e); + } + else { + reject(new RestError(`Error reading response as text: ${e.message}`, { + code: RestError.PARSE_ERROR, + })); + } + }); + }); +} +/** @internal */ +export function getBodyLength(body) { + if (!body) { + return 0; + } + else if (Buffer.isBuffer(body)) { + return body.length; + } + else if (isReadableStream(body)) { + return null; + } + else if (isArrayBuffer(body)) { + return body.byteLength; + } + else if (typeof body === "string") { + return Buffer.from(body).length; + } + else { + return null; + } +} +/** + * Create a new HttpClient instance for the NodeJS environment. 
+ * @internal + */ +export function createNodeHttpClient() { + return new NodeHttpClient(); +} +//# sourceMappingURL=nodeHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/package.json b/node_modules/@azure/core-rest-pipeline/dist/esm/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/pipeline.js b/node_modules/@azure/core-rest-pipeline/dist/esm/pipeline.js new file mode 100644 index 000000000..07e8ced7f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/pipeline.js @@ -0,0 +1,262 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); +/** + * A private implementation of Pipeline. + * Do not export this class from the package. + * @internal + */ +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? 
_a : []; + this._orderedPolicies = undefined; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; + } + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; + } + clone() { + return new HttpPipeline(this._policies); + } + static create() { + return new HttpPipeline(); + } + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. 
Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. + * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; + } + // Track policies for each phase. + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } + } + // First walk each policy and create a node to track metadata. 
+ for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. 
+ continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. + walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } + } + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } +} +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. 
+ */ +export function createEmptyPipeline() { + return HttpPipeline.create(); +} +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/pipelineRequest.js b/node_modules/@azure/core-rest-pipeline/dist/esm/pipelineRequest.js new file mode 100644 index 000000000..866575912 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/pipelineRequest.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders } from "./httpHeaders.js"; +import { randomUUID } from "@azure/core-util"; +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : createHttpHeaders(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || randomUUID(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } +} +/** + * Creates a new pipeline request with the given options. 
+ * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. + */ +export function createPipelineRequest(options) { + return new PipelineRequestImpl(options); +} +//# sourceMappingURL=pipelineRequest.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/auxiliaryAuthenticationHeaderPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/auxiliaryAuthenticationHeaderPolicy.js new file mode 100644 index 000000000..912154a6a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/auxiliaryAuthenticationHeaderPolicy.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTokenCycler } from "../util/tokenCycler.js"; +import { logger as coreLogger } from "../log.js"; +/** + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. + */ +export const auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; +} +/** + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. 
+ * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works + */ +export function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || coreLogger; + const tokenCyclerMap = new WeakMap(); + return { + name: auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = createTokenCycler(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. 
${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); + } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; +} +//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/bearerTokenAuthenticationPolicy.js new file mode 100644 index 000000000..4ef00eb8d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/bearerTokenAuthenticationPolicy.js @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTokenCycler } from "../util/tokenCycler.js"; +import { logger as coreLogger } from "../log.js"; +/** + * The programmatic identifier of the bearerTokenAuthenticationPolicy. + */ +export const bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. 
+ */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. + */ +export function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || coreLogger; + const callbacks = Object.assign({ authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge }, challengeCallbacks); + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. + const getAccessToken = credential + ? createTokenCycler(credential /* , options */) + : () => Promise.resolve(null); + return { + name: bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. 
+ * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + }); + let response; + let error; + try { + response = await next(request); + } + catch (err) { + error = err; + response = err.response; + } + if (callbacks.authorizeRequestOnChallenge && + (response === null || response === void 0 ? void 0 : response.status) === 401 && + getChallenge(response)) { + // processes challenge + const shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger, + }); + if (shouldSendRequest) { + return next(request); + } + } + if (error) { + throw error; + } + else { + return response; + } + }, + }; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/decompressResponsePolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/decompressResponsePolicy.js new file mode 100644 index 000000000..40e4ac757 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/decompressResponsePolicy.js @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the decompressResponsePolicy. 
+ */ +export const decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * A policy to enable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export function decompressResponsePolicy() { + return { + name: decompressResponsePolicyName, + async sendRequest(request, next) { + // HEAD requests have no body + if (request.method !== "HEAD") { + request.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request); + }, + }; +} +//# sourceMappingURL=decompressResponsePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/defaultRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/defaultRetryPolicy.js new file mode 100644 index 000000000..bda83eb35 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/defaultRetryPolicy.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { throttlingRetryStrategy } from "../retryStrategies/throttlingRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link defaultRetryPolicy} + */ +export const defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. 
+ */ +export function defaultRetryPolicy(options = {}) { + var _a; + return { + name: defaultRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy(), exponentialRetryStrategy(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=defaultRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/exponentialRetryPolicy.js new file mode 100644 index 000000000..12193c26b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/exponentialRetryPolicy.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * The programmatic identifier of the exponentialRetryPolicy. + */ +export const exponentialRetryPolicyName = "exponentialRetryPolicy"; +/** + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. + */ +export function exponentialRetryPolicy(options = {}) { + var _a; + return retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? 
_a : DEFAULT_RETRY_POLICY_COUNT, + }); +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/formDataPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/formDataPolicy.js new file mode 100644 index 000000000..e4816a489 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/formDataPolicy.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNodeLike, stringToUint8Array } from "@azure/core-util"; +import { createHttpHeaders } from "../httpHeaders.js"; +/** + * The programmatic identifier of the formDataPolicy. + */ +export const formDataPolicyName = "formDataPolicy"; +function formDataToFormDataMap(formData) { + var _a; + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); + formDataMap[key].push(value); + } + return formDataMap; +} +/** + * A policy that encodes FormData on the request into the body. 
+ */ +export function formDataPolicy() { + return { + name: formDataPolicyName, + async sendRequest(request, next) { + if (isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { + request.formData = formDataToFormDataMap(request.body); + request.body = undefined; + } + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); + } + else { + await prepareFormData(request.formData, request); + } + request.formData = undefined; + } + return next(request); + }, + }; +} +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } + else { + urlSearchParams.append(key, value.toString()); + } + } + return urlSearchParams.toString(); +} +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. + return; + } + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? 
values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: createHttpHeaders({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: stringToUint8Array(value, "utf-8"), + }); + } + else if (value === undefined || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } + else { + // using || instead of ?? here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = createHttpHeaders(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + // again, || is used since an empty value.type means the content type is unset + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value, + }); + } + } + } + request.multipartBody = { parts }; +} +//# sourceMappingURL=formDataPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/logPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/logPolicy.js new file mode 100644 index 000000000..e981567c6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/logPolicy.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger as coreLogger } from "../log.js"; +import { Sanitizer } from "../util/sanitizer.js"; +/** + * The programmatic identifier of the logPolicy. + */ +export const logPolicyName = "logPolicy"; +/** + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. + */ +export function logPolicy(options = {}) { + var _a; + const logger = (_a = options.logger) !== null && _a !== void 0 ? 
_a : coreLogger.info; + const sanitizer = new Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: logPolicyName, + async sendRequest(request, next) { + if (!logger.enabled) { + return next(request); + } + logger(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/multipartPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/multipartPolicy.js new file mode 100644 index 000000000..efa3fdcf9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/multipartPolicy.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { randomUUID, stringToUint8Array } from "@azure/core-util"; +import { concat } from "../util/concat.js"; +import { isBlob } from "../util/typeGuards.js"; +function generateBoundary() { + return `----AzSDKFormBoundary${randomUUID()}`; +} +function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; + } + return result; +} +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } + else if (isBlob(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? 
undefined : source.size; + } + else { + return undefined; + } +} +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; + } + else { + total += partLength; + } + } + return total; +} +async function buildRequestBody(request, parts, boundary) { + const sources = [ + stringToUint8Array(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + stringToUint8Array("\r\n", "utf-8"), + stringToUint8Array(encodeHeaders(part.headers), "utf-8"), + stringToUint8Array("\r\n", "utf-8"), + part.body, + stringToUint8Array(`\r\n--${boundary}`, "utf-8"), + ]), + stringToUint8Array("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); + } + request.body = await concat(sources); +} +/** + * Name of multipart policy + */ +export const multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + } + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + } +} +/** + * Pipeline policy for multipart requests + */ +export function multipartPolicy() { + return { + name: multipartPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); + } + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? 
_a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); + } + else { + boundary = generateBoundary(); + } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; +} +//# sourceMappingURL=multipartPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/ndJsonPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/ndJsonPolicy.js new file mode 100644 index 000000000..3b1fa6fb9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/ndJsonPolicy.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the ndJsonPolicy. + */ +export const ndJsonPolicyName = "ndJsonPolicy"; +/** + * ndJsonPolicy is a policy used to control keep alive settings for every request. 
+ */ +export function ndJsonPolicy() { + return { + name: ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); + }, + }; +} +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/proxyPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/proxyPolicy.js new file mode 100644 index 000000000..1ec048c12 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/proxyPolicy.js @@ -0,0 +1,191 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpsProxyAgent } from "https-proxy-agent"; +import { HttpProxyAgent } from "http-proxy-agent"; +import { logger } from "../log.js"; +const HTTPS_PROXY = "HTTPS_PROXY"; +const HTTP_PROXY = "HTTP_PROXY"; +const ALL_PROXY = "ALL_PROXY"; +const NO_PROXY = "NO_PROXY"; +/** + * The programmatic identifier of the proxyPolicy. + */ +export const proxyPolicyName = "proxyPolicy"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. 
*/ +const globalBypassedMap = new Map(); +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = new URL(uri).hostname; + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +export function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. + * If no argument is given, it attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + * @param proxyUrl - The url of the proxy to use. May contain authentication information. + * @deprecated - Internally this method is no longer necessary when setting proxy information. + */ +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const parsedUrl = new URL(proxyUrl); + const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; + return { + host: schema + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password, + }; +} +/** + * This method attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + */ +function getDefaultProxySettingsInternal() { + const envProxy = loadEnvironmentProxyValue(); + return envProxy ? 
new URL(envProxy) : undefined; +} +function getUrlFromProxySettings(settings) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(settings.host); + } + catch (_error) { + throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); + } + parsedProxyUrl.port = String(settings.port); + if (settings.username) { + parsedProxyUrl.username = settings.username; + } + if (settings.password) { + parsedProxyUrl.password = settings.password; + } + return parsedProxyUrl; +} +function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { + // Custom Agent should take precedence so if one is present + // we should skip to avoid overwriting it. + if (request.agent) { + return; + } + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (request.tlsSettings) { + logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const headers = request.headers.toJSON(); + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + cachedAgents.httpProxyAgent = new HttpProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpProxyAgent; + } + else { + if (!cachedAgents.httpsProxyAgent) { + cachedAgents.httpsProxyAgent = new HttpsProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpsProxyAgent; + } +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export function proxyPolicy(proxySettings, options) { + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + const defaultProxy = proxySettings + ? 
getUrlFromProxySettings(proxySettings) + : getDefaultProxySettingsInternal(); + const cachedAgents = {}; + return { + name: proxyPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.proxySettings && + defaultProxy && + !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? undefined : globalBypassedMap)) { + setProxyAgentOnRequest(request, cachedAgents, defaultProxy); + } + else if (request.proxySettings) { + setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); + } + return next(request); + }, + }; +} +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/redirectPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/redirectPolicy.js new file mode 100644 index 000000000..302a1b8ef --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/redirectPolicy.js @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the redirectPolicy. + */ +export const redirectPolicyName = "redirectPolicy"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +/** + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. 
+ */ +export function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/retryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/retryPolicy.js new file mode 100644 index 000000000..f58396f45 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/retryPolicy.js @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { delay } from "../util/helpers.js"; +import { createClientLogger } from "@azure/logger"; +import { AbortError } from "@azure/abort-controller"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +const retryPolicyLogger = createClientLogger("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. + */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +export function retryPolicy(strategies, options = { maxRetries: DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? 
_b : DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await delay(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. 
+ } + }, + }; +} +//# sourceMappingURL=retryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/setClientRequestIdPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/setClientRequestIdPolicy.js new file mode 100644 index 000000000..46baba4cd --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/setClientRequestIdPolicy.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the setClientRequestIdPolicy. + */ +export const setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. + */ +export function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); + } + return next(request); + }, + }; +} +//# sourceMappingURL=setClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/systemErrorRetryPolicy.js new file mode 100644 index 000000000..f6b608a75 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/systemErrorRetryPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link systemErrorRetryPolicy} + */ +export const systemErrorRetryPolicyName = "systemErrorRetryPolicy"; +/** + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. + */ +export function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: systemErrorRetryPolicyName, + sendRequest: retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/throttlingRetryPolicy.js new file mode 100644 index 000000000..9349e6e07 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/throttlingRetryPolicy.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { throttlingRetryStrategy } from "../retryStrategies/throttlingRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link throttlingRetryPolicy} + */ +export const throttlingRetryPolicyName = "throttlingRetryPolicy"; +/** + * A policy that retries when the server sends a 429 response with a Retry-After header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. + */ +export function throttlingRetryPolicy(options = {}) { + var _a; + return { + name: throttlingRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/tlsPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/tlsPolicy.js new file mode 100644 index 000000000..67ce535c6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/tlsPolicy.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Name of the TLS Policy + */ +export const tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
+ */ +export function tlsPolicy(tlsSettings) { + return { + name: tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + }, + }; +} +//# sourceMappingURL=tlsPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/tracingPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/tracingPolicy.js new file mode 100644 index 000000000..d077e7733 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/tracingPolicy.js @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTracingClient, } from "@azure/core-tracing"; +import { SDK_VERSION } from "../constants.js"; +import { getUserAgentValue } from "../util/userAgent.js"; +import { logger } from "../log.js"; +import { getErrorMessage, isError } from "@azure/core-util"; +import { isRestError } from "../restError.js"; +/** + * The programmatic identifier of the tracingPolicy. + */ +export const tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. + * @param options - Options to configure the telemetry logged by the tracing policy. + */ +export function tracingPolicy(options = {}) { + const userAgent = getUserAgentValue(options.userAgentPrefix); + const tracingClient = tryCreateTracingClient(); + return { + name: tracingPolicyName, + async sendRequest(request, next) { + var _a, _b; + if (!tracingClient || !((_a = request.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext)) { + return next(request); + } + const { span, tracingContext } = (_b = tryCreateSpan(tracingClient, request, userAgent)) !== null && _b !== void 0 ? 
_b : {}; + if (!span || !tracingContext) { + return next(request); + } + try { + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; + } + catch (err) { + tryProcessError(span, err); + throw err; + } + }, + }; +} +function tryCreateTracingClient() { + try { + return createTracingClient({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: SDK_VERSION, + }); + } + catch (e) { + logger.warning(`Error when creating the TracingClient: ${getErrorMessage(e)}`); + return undefined; + } +} +function tryCreateSpan(tracingClient, request, userAgent) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes: { + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + if (userAgent) { + span.setAttribute("http.user_agent", userAgent); + } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); + } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + } + catch (e) { + logger.warning(`Skipping creating a tracing span due to an error: ${getErrorMessage(e)}`); + return undefined; + } +} +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: isError(error) ? 
error : undefined, + }); + if (isRestError(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); + } + span.end(); + } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${getErrorMessage(e)}`); + } +} +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + status: "success", + }); + span.end(); + } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${getErrorMessage(e)}`); + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/policies/userAgentPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/userAgentPolicy.js new file mode 100644 index 000000000..10ae6852c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/policies/userAgentPolicy.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getUserAgentHeaderName, getUserAgentValue } from "../util/userAgent.js"; +const UserAgentHeaderName = getUserAgentHeaderName(); +/** + * The programmatic identifier of the userAgentPolicy. + */ +export const userAgentPolicyName = "userAgentPolicy"; +/** + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. 
+ */ +export function userAgentPolicy(options = {}) { + const userAgentValue = getUserAgentValue(options.userAgentPrefix); + return { + name: userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, userAgentValue); + } + return next(request); + }, + }; +} +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/restError.js b/node_modules/@azure/core-rest-pipeline/dist/esm/restError.js new file mode 100644 index 000000000..9008050e6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/restError.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isError } from "@azure/core-util"; +import { custom } from "./util/inspect.js"; +import { Sanitizer } from "./util/sanitizer.js"; +const errorSanitizer = new Sanitizer(); +/** + * A custom error type for failed pipeline requests. + */ +export class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + this.request = options.request; + this.response = options.response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. 
+ */ +export function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return isError(e) && e.name === "RestError"; +} +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/exponentialRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/exponentialRetryStrategy.js new file mode 100644 index 000000000..c1e5c0d4e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/exponentialRetryStrategy.js @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getRandomIntegerInclusive } from "@azure/core-util"; +import { isThrottlingRetryResponse } from "./throttlingRetryStrategy.js"; +// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +export function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? 
_b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + let retryAfterInMs = retryInterval; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && (isThrottlingRetryResponse(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + // Exponentially increase the delay each time + const exponentialDelay = retryAfterInMs * Math.pow(2, retryCount); + // Don't let the delay exceed the maximum + const clampedExponentialDelay = Math.min(maxRetryInterval, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + retryAfterInMs = + clampedExponentialDelay / 2 + getRandomIntegerInclusive(0, clampedExponentialDelay / 2); + return { retryAfterInMs }; + }, + }; +} +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +export function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +/** + * Determines whether an error from a pipeline response was triggered in the network layer. 
+ */ +export function isSystemError(err) { + if (!err) { + return false; + } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); +} +//# sourceMappingURL=exponentialRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/retryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/retryStrategy.js new file mode 100644 index 000000000..4b2354b00 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/retryStrategy.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=retryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/throttlingRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/throttlingRetryStrategy.js new file mode 100644 index 000000000..5990ccc90 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/retryStrategies/throttlingRetryStrategy.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { parseHeaderValueAsNumber } from "../util/helpers.js"; +/** + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). + */ +const RetryAfterHeader = "Retry-After"; +/** + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. 
+ * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp + */ +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; +/** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. + * If not throttling retry response, returns `undefined`. + * + * @internal + */ +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = parseHeaderValueAsNumber(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds + } + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; + } + catch (e) { + return undefined; + } +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. 
+ */ +export function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +export function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs, + }; + }, + }; +} +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/concat.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/concat.js new file mode 100644 index 000000000..3e820e883 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/concat.js @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { __asyncGenerator, __asyncValues, __await } from "tslib"; +import { Readable } from "node:stream"; +import { isBlob } from "./typeGuards.js"; +import { getRawContent } from "./file.js"; +function streamAsyncIterator() { + return __asyncGenerator(this, arguments, function* streamAsyncIterator_1() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = yield __await(reader.read()); + if (done) { + return yield __await(void 0); + } + yield yield __await(value); + } + } + finally { + reader.releaseLock(); + } + }); +} +function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); + } + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); + } +} +function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return Readable.fromWeb(stream); + } + else { + return stream; + } +} +function toStream(source) { + if (source instanceof Uint8Array) { + return Readable.from(Buffer.from(source)); + } + else if (isBlob(source)) { + 
return toStream(getRawContent(source)); + } + else { + return ensureNodeStream(source); + } +} +/** + * Utility function that concatenates a set of binary inputs into one combined output. + * + * @param sources - array of sources for the concatenation + * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. + * In browser, returns a `Blob` representing all the concatenated inputs. + * + * @internal + */ +export async function concat(sources) { + return function () { + const streams = sources.map((x) => (typeof x === "function" ? x() : x)).map(toStream); + return Readable.from((function () { + return __asyncGenerator(this, arguments, function* () { + var _a, e_1, _b, _c; + for (const stream of streams) { + try { + for (var _d = true, stream_1 = (e_1 = void 0, __asyncValues(stream)), stream_1_1; stream_1_1 = yield __await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { + _c = stream_1_1.value; + _d = false; + const chunk = _c; + yield yield __await(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = stream_1.return)) yield __await(_b.call(stream_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); + })()); + }; +} +//# sourceMappingURL=concat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/file.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/file.js new file mode 100644 index 000000000..0f271810e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/file.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isNodeLike } from "@azure/core-util"; +import { isNodeReadableStream } from "./typeGuards.js"; +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; +/** + * Private symbol used as key on objects created using createFile containing the + * original source of the file object. + * + * This is used in Node to access the original Node stream without using Blob#stream, which + * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and + * Readable#to/fromWeb in Node versions we support: + * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) + * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) + * + * Once these versions are no longer supported, we may be able to stop doing this. + * + * @internal + */ +const rawContent = Symbol("rawContent"); +function hasRawContent(x) { + return typeof x[rawContent] === "function"; +} +/** + * Extract the raw content from a given blob-like object. If the input was created using createFile + * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. + * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. + * + * @internal + */ +export function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); + } + else { + return blob.stream(); + } +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. 
+ * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +export function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { + const s = stream(); + if (isNodeReadableStream(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + } + return s; + }, [rawContent]: stream }); +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +export function createFile(content, name, options = {}) { + var _a, _b, _c; + if (isNodeLike) { + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? 
_a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); + } + else { + return new File([content], name, options); + } +} +//# sourceMappingURL=file.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/helpers.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/helpers.js new file mode 100644 index 000000000..a1c1183ce --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/helpers.js @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. + * @returns Resolved promise + */ +export function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? 
void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} +/** + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. + */ +export function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; +} +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/inspect.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/inspect.js new file mode 100644 index 000000000..c4012807c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/inspect.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { inspect } from "node:util"; +export const custom = inspect.custom; +//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/sanitizer.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/sanitizer.js new file mode 100644 index 000000000..4ea4b25b1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/sanitizer.js @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isObject } from "@azure/core-util"; +const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +/** + * @internal + */ +export class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "headers") { + return 
this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; + } + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); + } +} +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/tokenCycler.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/tokenCycler.js new file mode 100644 index 000000000..514c6cb16 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/tokenCycler.js @@ -0,0 +1,149 @@ +// 
Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { delay } from "./helpers.js"; +// Default options for the cycler if none are provided +export const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3000, // Allow refresh attempts every 3s + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. 
It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +export function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(scopes, getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. 
+ refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + tenantId = undefined; + throw reason; + }); + } + return refreshWorker; + } + return async (scopes, tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. + const mustRefresh = tenantId !== tokenOptions.tenantId || Boolean(tokenOptions.claims) || cycler.mustRefresh; + if (mustRefresh) + return refresh(scopes, tokenOptions); + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); + } + return token; + }; +} +//# sourceMappingURL=tokenCycler.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/typeGuards.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/typeGuards.js new file mode 100644 index 000000000..2baffad6a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/typeGuards.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +export function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); +} +export function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +export function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); +} +export function isBlob(x) { + return typeof x.stream === "function"; +} +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/userAgent.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/userAgent.js new file mode 100644 index 000000000..afdddc19b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/userAgent.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getHeaderName, setPlatformSpecificData } from "./userAgentPlatform.js"; +import { SDK_VERSION } from "../constants.js"; +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? `${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); +} +/** + * @internal + */ +export function getUserAgentHeaderName() { + return getHeaderName(); +} +/** + * @internal + */ +export function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", SDK_VERSION); + setPlatformSpecificData(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? 
`${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; +} +//# sourceMappingURL=userAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/util/userAgentPlatform.js b/node_modules/@azure/core-rest-pipeline/dist/esm/util/userAgentPlatform.js new file mode 100644 index 000000000..4f8a469a9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/util/userAgentPlatform.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import * as os from "node:os"; +import * as process from "node:process"; +/** + * @internal + */ +export function getHeaderName() { + return "User-Agent"; +} +/** + * @internal + */ +export function setPlatformSpecificData(map) { + const versions = process.versions; + if (versions.bun) { + map.set("Bun", versions.bun); + } + else if (versions.deno) { + map.set("Deno", versions.deno); + } + else if (versions.node) { + map.set("Node", versions.node); + } + map.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`); +} +//# sourceMappingURL=userAgentPlatform.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/esm/xhrHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/esm/xhrHttpClient.js new file mode 100644 index 000000000..925d1d6ba --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/esm/xhrHttpClient.js @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { RestError } from "./restError.js"; +import { isReadableStream } from "./util/typeGuards.js"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + * @internal + */ +class XhrHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const [name, value] of request.headers) { + xhr.setRequestHeader(name, value); + } + xhr.responseType = ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) ? "blob" : "text"; + const body = typeof request.body === "function" ? request.body() : request.body; + if (isReadableStream(body)) { + throw new Error("streams are not supported in XhrHttpClient."); + } + xhr.send(body === undefined ? 
null : body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a, _b; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + xhr.response + .text() + .then((text) => { + res({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + return; + }) + .catch((e) => { + rej(e); + }); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", 
(rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +function parseHeaders(xhr) { + const responseHeaders = createHttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, { + code: RestError.REQUEST_SEND_ERROR, + request, + }))); + const abortError = new AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +/** + * Create a new HttpClient instance for the browser environment. + * @internal + */ +export function createXhrHttpClient() { + return new XhrHttpClient(); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/accessTokenCache.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/accessTokenCache.js new file mode 100644 index 000000000..ff7dee1a6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/accessTokenCache.js @@ -0,0 +1,32 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Defines the default token refresh buffer duration. + */ +export const DefaultTokenRefreshBufferMs = 2 * 60 * 1000; // 2 Minutes +/** + * Provides an AccessTokenCache implementation which clears + * the cached AccessToken's after the expiresOnTimestamp has + * passed. + * @internal + */ +export class ExpiringAccessTokenCache { + /** + * Constructs an instance of ExpiringAccessTokenCache with + * an optional expiration buffer time. 
+ */ + constructor(tokenRefreshBufferMs = DefaultTokenRefreshBufferMs) { + this.tokenRefreshBufferMs = tokenRefreshBufferMs; + } + setCachedToken(accessToken) { + this.cachedToken = accessToken; + } + getCachedToken() { + if (this.cachedToken && + Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { + this.cachedToken = undefined; + } + return this.cachedToken; + } +} +//# sourceMappingURL=accessTokenCache.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/constants.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/constants.js new file mode 100644 index 000000000..88acfa811 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/constants.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export const SDK_VERSION = "1.16.0"; +export const DEFAULT_RETRY_POLICY_COUNT = 3; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/createPipelineFromOptions.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/createPipelineFromOptions.js new file mode 100644 index 000000000..1cc155029 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/createPipelineFromOptions.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { logPolicy } from "./policies/logPolicy.js"; +import { createEmptyPipeline } from "./pipeline.js"; +import { redirectPolicy } from "./policies/redirectPolicy.js"; +import { userAgentPolicy } from "./policies/userAgentPolicy.js"; +import { multipartPolicy, multipartPolicyName } from "./policies/multipartPolicy.js"; +import { decompressResponsePolicy } from "./policies/decompressResponsePolicy.js"; +import { defaultRetryPolicy } from "./policies/defaultRetryPolicy.js"; +import { formDataPolicy } from "./policies/formDataPolicy.js"; +import { isNodeLike } from "@azure/core-util"; +import { proxyPolicy } from "./policies/proxyPolicy.js"; +import { setClientRequestIdPolicy } from "./policies/setClientRequestIdPolicy.js"; +import { tlsPolicy } from "./policies/tlsPolicy.js"; +import { tracingPolicy } from "./policies/tracingPolicy.js"; +/** + * Create a new pipeline with a default set of customizable policies. + * @param options - Options to configure a custom pipeline. + */ +export function createPipelineFromOptions(options) { + var _a; + const pipeline = createEmptyPipeline(); + if (isNodeLike) { + if (options.tlsOptions) { + pipeline.addPolicy(tlsPolicy(options.tlsOptions)); + } + pipeline.addPolicy(proxyPolicy(options.proxyOptions)); + pipeline.addPolicy(decompressResponsePolicy()); + } + pipeline.addPolicy(formDataPolicy(), { beforePolicies: [multipartPolicyName] }); + pipeline.addPolicy(userAgentPolicy(options.userAgentOptions)); + pipeline.addPolicy(setClientRequestIdPolicy((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); + // The multipart policy is added after policies with no phase, so that + // policies can be added between it and formDataPolicy to modify + // properties (e.g., making the boundary constant in recorded tests). 
+ pipeline.addPolicy(multipartPolicy(), { afterPhase: "Deserialize" }); + pipeline.addPolicy(defaultRetryPolicy(options.retryOptions), { phase: "Retry" }); + pipeline.addPolicy(tracingPolicy(options.userAgentOptions), { afterPhase: "Retry" }); + if (isNodeLike) { + // Both XHR and Fetch expect to handle redirects automatically, + // so only include this policy when we're in Node. + pipeline.addPolicy(redirectPolicy(options.redirectOptions), { afterPhase: "Retry" }); + } + pipeline.addPolicy(logPolicy(options.loggingOptions), { afterPhase: "Sign" }); + return pipeline; +} +//# sourceMappingURL=createPipelineFromOptions.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/defaultHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/defaultHttpClient.js new file mode 100644 index 000000000..afd2e0e9f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/defaultHttpClient.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createXhrHttpClient } from "./xhrHttpClient.js"; +/** + * Create the correct HttpClient for the current environment. + */ +export function createDefaultHttpClient() { + return createXhrHttpClient(); +} +//# sourceMappingURL=defaultHttpClient-react-native.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/fetchHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/fetchHttpClient.js new file mode 100644 index 000000000..64942e96d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/fetchHttpClient.js @@ -0,0 +1,256 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AbortError } from "@azure/abort-controller"; +import { RestError } from "./restError.js"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { isNodeReadableStream, isWebReadableStream } from "./util/typeGuards.js"; +/** + * Checks if the body is a Blob or Blob-like + */ +function isBlob(body) { + // File objects count as a type of Blob, so we want to use instanceof explicitly + return (typeof Blob === "function" || typeof Blob === "object") && body instanceof Blob; +} +/** + * A HttpClient implementation that uses window.fetch to send HTTP requests. + * @internal + */ +class FetchHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request) { + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + try { + return await makeRequest(request); + } + catch (e) { + throw getError(e, request); + } + } +} +/** + * Sends a request + */ +async function makeRequest(request) { + const { abortController, abortControllerCleanup } = setupAbortSignal(request); + try { + const headers = buildFetchHeaders(request.headers); + const { streaming, body: requestBody } = buildRequestBody(request); + const requestInit = Object.assign(Object.assign({ body: requestBody, method: request.method, headers: headers, signal: abortController.signal }, ("credentials" in Request.prototype + ? { credentials: request.withCredentials ? "include" : "same-origin" } + : {})), ("cache" in Request.prototype ? { cache: "no-store" } : {})); + // According to https://fetch.spec.whatwg.org/#fetch-method, + // init.duplex must be set when body is a ReadableStream object. 
+ // currently "half" is the only valid value. + if (streaming) { + requestInit.duplex = "half"; + } + /** + * Developers of the future: + * Do not set redirect: "manual" as part + * of request options. + * It will not work as you expect. + */ + const response = await fetch(request.url, requestInit); + // If we're uploading a blob, we need to fire the progress event manually + if (isBlob(request.body) && request.onUploadProgress) { + request.onUploadProgress({ loadedBytes: request.body.size }); + } + return buildPipelineResponse(response, request, abortControllerCleanup); + } + catch (e) { + abortControllerCleanup === null || abortControllerCleanup === void 0 ? void 0 : abortControllerCleanup(); + throw e; + } +} +/** + * Creates a pipeline response from a Fetch response; + */ +async function buildPipelineResponse(httpResponse, request, abortControllerCleanup) { + var _a, _b; + const headers = buildPipelineHeaders(httpResponse); + const response = { + request, + headers, + status: httpResponse.status, + }; + const bodyStream = isWebReadableStream(httpResponse.body) + ? buildBodyStream(httpResponse.body, { + onProgress: request.onDownloadProgress, + onEnd: abortControllerCleanup, + }) + : httpResponse.body; + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(response.status))) { + if (request.enableBrowserStreams) { + response.browserStreamBody = bodyStream !== null && bodyStream !== void 0 ? bodyStream : undefined; + } + else { + const responseStream = new Response(bodyStream); + response.blobBody = responseStream.blob(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? 
void 0 : abortControllerCleanup(); + } + } + else { + const responseStream = new Response(bodyStream); + response.bodyAsText = await responseStream.text(); + abortControllerCleanup === null || abortControllerCleanup === void 0 ? void 0 : abortControllerCleanup(); + } + return response; +} +function setupAbortSignal(request) { + const abortController = new AbortController(); + // Cleanup function + let abortControllerCleanup; + /** + * Attach an abort listener to the request + */ + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + abortControllerCleanup = () => { + var _a; + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.removeEventListener("abort", abortListener); + } + }; + } + // If a timeout was passed, call the abort signal once the time elapses + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + return { abortController, abortControllerCleanup }; +} +/** + * Gets the specific error + */ +// eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters +function getError(e, request) { + var _a; + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + return e; + } + else { + return new RestError(`Error sending request: ${e.message}`, { + code: (_a = e === null || e === void 0 ? void 0 : e.code) !== null && _a !== void 0 ? 
_a : RestError.REQUEST_SEND_ERROR, + request, + }); + } +} +/** + * Converts PipelineRequest headers to Fetch headers + */ +function buildFetchHeaders(pipelineHeaders) { + const headers = new Headers(); + for (const [name, value] of pipelineHeaders) { + headers.append(name, value); + } + return headers; +} +function buildPipelineHeaders(httpResponse) { + const responseHeaders = createHttpHeaders(); + for (const [name, value] of httpResponse.headers) { + responseHeaders.set(name, value); + } + return responseHeaders; +} +function buildRequestBody(request) { + const body = typeof request.body === "function" ? request.body() : request.body; + if (isNodeReadableStream(body)) { + throw new Error("Node streams are not supported in browser environment."); + } + return isWebReadableStream(body) + ? { streaming: true, body: buildBodyStream(body, { onProgress: request.onUploadProgress }) } + : { streaming: false, body }; +} +/** + * Reads the request/response original stream and stream it through a new + * ReadableStream, this is done to be able to report progress in a way that + * all modern browsers support. TransformStreams would be an alternative, + * however they are not yet supported by all browsers i.e Firefox + */ +function buildBodyStream(readableStream, options = {}) { + let loadedBytes = 0; + const { onProgress, onEnd } = options; + // If the current browser supports pipeThrough we use a TransformStream + // to report progress + if (isTransformStreamSupported(readableStream)) { + return readableStream.pipeThrough(new TransformStream({ + transform(chunk, controller) { + if (chunk === null) { + controller.terminate(); + return; + } + controller.enqueue(chunk); + loadedBytes += chunk.length; + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + flush() { + onEnd === null || onEnd === void 0 ? 
void 0 : onEnd(); + }, + })); + } + else { + // If we can't use transform streams, wrap the original stream in a new readable stream + // and use pull to enqueue each chunk and report progress. + const reader = readableStream.getReader(); + return new ReadableStream({ + async pull(controller) { + var _a; + const { done, value } = await reader.read(); + // When no more data needs to be consumed, break the reading + if (done || !value) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + // Close the stream + controller.close(); + reader.releaseLock(); + return; + } + loadedBytes += (_a = value === null || value === void 0 ? void 0 : value.length) !== null && _a !== void 0 ? _a : 0; + // Enqueue the next data chunk into our target stream + controller.enqueue(value); + if (onProgress) { + onProgress({ loadedBytes }); + } + }, + cancel(reason) { + onEnd === null || onEnd === void 0 ? void 0 : onEnd(); + return reader.cancel(reason); + }, + }); + } +} +/** + * Create a new HttpClient instance for the browser environment. + * @internal + */ +export function createFetchHttpClient() { + return new FetchHttpClient(); +} +function isTransformStreamSupported(readableStream) { + return readableStream.pipeThrough !== undefined && self.TransformStream !== undefined; +} +//# sourceMappingURL=fetchHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/httpHeaders.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/httpHeaders.js new file mode 100644 index 000000000..c4b7c919f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/httpHeaders.js @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+function normalizeName(name) { + return name.toLowerCase(); +} +function* headerIterator(map) { + for (const entry of map.values()) { + yield [entry.name, entry.value]; + } +} +class HttpHeadersImpl { + constructor(rawHeaders) { + this._headersMap = new Map(); + if (rawHeaders) { + for (const headerName of Object.keys(rawHeaders)) { + this.set(headerName, rawHeaders[headerName]); + } + } + } + /** + * Set a header in this collection with the provided name and value. The name is + * case-insensitive. + * @param name - The name of the header to set. This value is case-insensitive. + * @param value - The value of the header to set. + */ + set(name, value) { + this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); + } + /** + * Get the header value for the provided header name, or undefined if no header exists in this + * collection with the provided name. + * @param name - The name of the header. This value is case-insensitive. + */ + get(name) { + var _a; + return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; + } + /** + * Get whether or not this header collection contains a header entry for the provided header name. + * @param name - The name of the header to set. This value is case-insensitive. + */ + has(name) { + return this._headersMap.has(normalizeName(name)); + } + /** + * Remove the header with the provided headerName. + * @param name - The name of the header to remove. + */ + delete(name) { + this._headersMap.delete(normalizeName(name)); + } + /** + * Get the JSON object representation of this HTTP header collection. 
+ */ + toJSON(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const entry of this._headersMap.values()) { + result[entry.name] = entry.value; + } + } + else { + for (const [normalizedName, entry] of this._headersMap) { + result[normalizedName] = entry.value; + } + } + return result; + } + /** + * Get the string representation of this HTTP header collection. + */ + toString() { + return JSON.stringify(this.toJSON({ preserveCase: true })); + } + /** + * Iterate over tuples of header [name, value] pairs. + */ + [Symbol.iterator]() { + return headerIterator(this._headersMap); + } +} +/** + * Creates an object that satisfies the `HttpHeaders` interface. + * @param rawHeaders - A simple object representing initial headers + */ +export function createHttpHeaders(rawHeaders) { + return new HttpHeadersImpl(rawHeaders); +} +//# sourceMappingURL=httpHeaders.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/index.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/index.js new file mode 100644 index 000000000..619ce9ca5 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/index.js @@ -0,0 +1,28 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { createEmptyPipeline, } from "./pipeline.js"; +export { createPipelineFromOptions, } from "./createPipelineFromOptions.js"; +export { createDefaultHttpClient } from "./defaultHttpClient.js"; +export { createHttpHeaders } from "./httpHeaders.js"; +export { createPipelineRequest } from "./pipelineRequest.js"; +export { RestError, isRestError } from "./restError.js"; +export { decompressResponsePolicy, decompressResponsePolicyName, } from "./policies/decompressResponsePolicy.js"; +export { exponentialRetryPolicy, exponentialRetryPolicyName, } from "./policies/exponentialRetryPolicy.js"; +export { setClientRequestIdPolicy, setClientRequestIdPolicyName, } from "./policies/setClientRequestIdPolicy.js"; +export { logPolicy, logPolicyName } from "./policies/logPolicy.js"; +export { multipartPolicy, multipartPolicyName } from "./policies/multipartPolicy.js"; +export { proxyPolicy, proxyPolicyName, getDefaultProxySettings } from "./policies/proxyPolicy.js"; +export { redirectPolicy, redirectPolicyName, } from "./policies/redirectPolicy.js"; +export { systemErrorRetryPolicy, systemErrorRetryPolicyName, } from "./policies/systemErrorRetryPolicy.js"; +export { throttlingRetryPolicy, throttlingRetryPolicyName, } from "./policies/throttlingRetryPolicy.js"; +export { retryPolicy } from "./policies/retryPolicy.js"; +export { tracingPolicy, tracingPolicyName, } from "./policies/tracingPolicy.js"; +export { defaultRetryPolicy, } from "./policies/defaultRetryPolicy.js"; +export { userAgentPolicy, userAgentPolicyName, } from "./policies/userAgentPolicy.js"; +export { tlsPolicy, tlsPolicyName } from "./policies/tlsPolicy.js"; +export { formDataPolicy, formDataPolicyName } from "./policies/formDataPolicy.js"; +export { bearerTokenAuthenticationPolicy, bearerTokenAuthenticationPolicyName, } from "./policies/bearerTokenAuthenticationPolicy.js"; +export { ndJsonPolicy, ndJsonPolicyName } from "./policies/ndJsonPolicy.js"; +export { auxiliaryAuthenticationHeaderPolicy, 
auxiliaryAuthenticationHeaderPolicyName, } from "./policies/auxiliaryAuthenticationHeaderPolicy.js"; +export { createFile, createFileFromStream, } from "./util/file.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/interfaces.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/interfaces.js new file mode 100644 index 000000000..c0a2e2e65 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/interfaces.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/log.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/log.js new file mode 100644 index 000000000..10a0a4eb6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/log.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createClientLogger } from "@azure/logger"; +export const logger = createClientLogger("core-rest-pipeline"); +//# sourceMappingURL=log.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/nodeHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/nodeHttpClient.js new file mode 100644 index 000000000..9d49b52a1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/nodeHttpClient.js @@ -0,0 +1,332 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import * as http from "node:http"; +import * as https from "node:https"; +import * as zlib from "node:zlib"; +import { Transform } from "node:stream"; +import { AbortError } from "@azure/abort-controller"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { RestError } from "./restError.js"; +import { logger } from "./log.js"; +const DEFAULT_TLS_SETTINGS = {}; +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream) { + return new Promise((resolve) => { + stream.on("close", resolve); + stream.on("end", resolve); + stream.on("error", resolve); + }); +} +function isArrayBuffer(body) { + return body && typeof body.byteLength === "number"; +} +class ReportTransform extends Transform { + // eslint-disable-next-line @typescript-eslint/ban-types + _transform(chunk, _encoding, callback) { + this.push(chunk); + this.loadedBytes += chunk.length; + try { + this.progressCallback({ loadedBytes: this.loadedBytes }); + callback(); + } + catch (e) { + callback(e); + } + } + constructor(progressCallback) { + super(); + this.loadedBytes = 0; + this.progressCallback = progressCallback; + } +} +/** + * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. + * @internal + */ +class NodeHttpClient { + constructor() { + this.cachedHttpsAgents = new WeakMap(); + } + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. 
+ */ + async sendRequest(request) { + var _a, _b, _c; + const abortController = new AbortController(); + let abortListener; + if (request.abortSignal) { + if (request.abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + abortListener = (event) => { + if (event.type === "abort") { + abortController.abort(); + } + }; + request.abortSignal.addEventListener("abort", abortListener); + } + if (request.timeout > 0) { + setTimeout(() => { + abortController.abort(); + }, request.timeout); + } + const acceptEncoding = request.headers.get("Accept-Encoding"); + const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); + let body = typeof request.body === "function" ? request.body() : request.body; + if (body && !request.headers.has("Content-Length")) { + const bodyLength = getBodyLength(body); + if (bodyLength !== null) { + request.headers.set("Content-Length", bodyLength); + } + } + let responseStream; + try { + if (body && request.onUploadProgress) { + const onUploadProgress = request.onUploadProgress; + const uploadReportStream = new ReportTransform(onUploadProgress); + uploadReportStream.on("error", (e) => { + logger.error("Error in upload progress", e); + }); + if (isReadableStream(body)) { + body.pipe(uploadReportStream); + } + else { + uploadReportStream.end(body); + } + body = uploadReportStream; + } + const res = await this.makeRequest(request, abortController, body); + const headers = getResponseHeaders(res); + const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; + const response = { + status, + headers, + request, + }; + // Responses to HEAD must not have a body. + // If they do return a body, that body must be ignored. 
+ if (request.method === "HEAD") { + // call resume() and not destroy() to avoid closing the socket + // and losing keep alive + res.resume(); + return response; + } + responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; + const onDownloadProgress = request.onDownloadProgress; + if (onDownloadProgress) { + const downloadReportStream = new ReportTransform(onDownloadProgress); + downloadReportStream.on("error", (e) => { + logger.error("Error in download progress", e); + }); + responseStream.pipe(downloadReportStream); + responseStream = downloadReportStream; + } + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || + ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { + response.readableStreamBody = responseStream; + } + else { + response.bodyAsText = await streamToText(responseStream); + } + return response; + } + finally { + // clean up event listener + if (request.abortSignal && abortListener) { + let uploadStreamDone = Promise.resolve(); + if (isReadableStream(body)) { + uploadStreamDone = isStreamComplete(body); + } + let downloadStreamDone = Promise.resolve(); + if (isReadableStream(responseStream)) { + downloadStreamDone = isStreamComplete(responseStream); + } + Promise.all([uploadStreamDone, downloadStreamDone]) + .then(() => { + var _a; + // eslint-disable-next-line promise/always-return + if (abortListener) { + (_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); + } + }) + .catch((e) => { + logger.warning("Error when cleaning up abortListener on httpRequest", e); + }); + } + } + } + makeRequest(request, abortController, body) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); + const options = { + agent, + hostname: url.hostname, + path: `${url.pathname}${url.search}`, + port: url.port, + method: request.method, + headers: request.headers.toJSON({ preserveCase: true }), + }; + return new Promise((resolve, reject) => { + const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve); + req.once("error", (err) => { + var _a; + reject(new RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : RestError.REQUEST_SEND_ERROR, request })); + }); + abortController.signal.addEventListener("abort", () => { + const abortError = new AbortError("The operation was aborted."); + req.destroy(abortError); + reject(abortError); + }); + if (body && isReadableStream(body)) { + body.pipe(req); + } + else if (body) { + if (typeof body === "string" || Buffer.isBuffer(body)) { + req.end(body); + } + else if (isArrayBuffer(body)) { + req.end(ArrayBuffer.isView(body) ? 
Buffer.from(body.buffer) : Buffer.from(body)); + } + else { + logger.error("Unrecognized body type", body); + reject(new RestError("Unrecognized body type")); + } + } + else { + // streams don't like "undefined" being passed as data + req.end(); + } + }); + } + getOrCreateAgent(request, isInsecure) { + var _a; + const disableKeepAlive = request.disableKeepAlive; + // Handle Insecure requests first + if (isInsecure) { + if (disableKeepAlive) { + // keepAlive:false is the default so we don't need a custom Agent + return http.globalAgent; + } + if (!this.cachedHttpAgent) { + // If there is no cached agent create a new one and cache it. + this.cachedHttpAgent = new http.Agent({ keepAlive: true }); + } + return this.cachedHttpAgent; + } + else { + if (disableKeepAlive && !request.tlsSettings) { + // When there are no tlsSettings and keepAlive is false + // we don't need a custom agent + return https.globalAgent; + } + // We use the tlsSettings to index cached clients + const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; + // Get the cached agent or create a new one with the + // provided values for keepAlive and tlsSettings + let agent = this.cachedHttpsAgents.get(tlsSettings); + if (agent && agent.options.keepAlive === !disableKeepAlive) { + return agent; + } + logger.info("No cached TLS Agent exist, creating a new Agent"); + agent = new https.Agent(Object.assign({ + // keepAlive is true if disableKeepAlive is false. 
+ keepAlive: !disableKeepAlive }, tlsSettings)); + this.cachedHttpsAgents.set(tlsSettings, agent); + return agent; + } + } +} +function getResponseHeaders(res) { + const headers = createHttpHeaders(); + for (const header of Object.keys(res.headers)) { + const value = res.headers[header]; + if (Array.isArray(value)) { + if (value.length > 0) { + headers.set(header, value[0]); + } + } + else if (value) { + headers.set(header, value); + } + } + return headers; +} +function getDecodedResponseStream(stream, headers) { + const contentEncoding = headers.get("Content-Encoding"); + if (contentEncoding === "gzip") { + const unzip = zlib.createGunzip(); + stream.pipe(unzip); + return unzip; + } + else if (contentEncoding === "deflate") { + const inflate = zlib.createInflate(); + stream.pipe(inflate); + return inflate; + } + return stream; +} +function streamToText(stream) { + return new Promise((resolve, reject) => { + const buffer = []; + stream.on("data", (chunk) => { + if (Buffer.isBuffer(chunk)) { + buffer.push(chunk); + } + else { + buffer.push(Buffer.from(chunk)); + } + }); + stream.on("end", () => { + resolve(Buffer.concat(buffer).toString("utf8")); + }); + stream.on("error", (e) => { + if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { + reject(e); + } + else { + reject(new RestError(`Error reading response as text: ${e.message}`, { + code: RestError.PARSE_ERROR, + })); + } + }); + }); +} +/** @internal */ +export function getBodyLength(body) { + if (!body) { + return 0; + } + else if (Buffer.isBuffer(body)) { + return body.length; + } + else if (isReadableStream(body)) { + return null; + } + else if (isArrayBuffer(body)) { + return body.byteLength; + } + else if (typeof body === "string") { + return Buffer.from(body).length; + } + else { + return null; + } +} +/** + * Create a new HttpClient instance for the NodeJS environment. 
+ * @internal + */ +export function createNodeHttpClient() { + return new NodeHttpClient(); +} +//# sourceMappingURL=nodeHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/package.json b/node_modules/@azure/core-rest-pipeline/dist/react-native/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/pipeline.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/pipeline.js new file mode 100644 index 000000000..07e8ced7f --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/pipeline.js @@ -0,0 +1,262 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); +/** + * A private implementation of Pipeline. + * Do not export this class from the package. + * @internal + */ +class HttpPipeline { + constructor(policies) { + var _a; + this._policies = []; + this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? 
_a : []; + this._orderedPolicies = undefined; + } + addPolicy(policy, options = {}) { + if (options.phase && options.afterPhase) { + throw new Error("Policies inside a phase cannot specify afterPhase."); + } + if (options.phase && !ValidPhaseNames.has(options.phase)) { + throw new Error(`Invalid phase name: ${options.phase}`); + } + if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { + throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); + } + this._policies.push({ + policy, + options, + }); + this._orderedPolicies = undefined; + } + removePolicy(options) { + const removedPolicies = []; + this._policies = this._policies.filter((policyDescriptor) => { + if ((options.name && policyDescriptor.policy.name === options.name) || + (options.phase && policyDescriptor.options.phase === options.phase)) { + removedPolicies.push(policyDescriptor.policy); + return false; + } + else { + return true; + } + }); + this._orderedPolicies = undefined; + return removedPolicies; + } + sendRequest(httpClient, request) { + const policies = this.getOrderedPolicies(); + const pipeline = policies.reduceRight((next, policy) => { + return (req) => { + return policy.sendRequest(req, next); + }; + }, (req) => httpClient.sendRequest(req)); + return pipeline(request); + } + getOrderedPolicies() { + if (!this._orderedPolicies) { + this._orderedPolicies = this.orderPolicies(); + } + return this._orderedPolicies; + } + clone() { + return new HttpPipeline(this._policies); + } + static create() { + return new HttpPipeline(); + } + orderPolicies() { + /** + * The goal of this method is to reliably order pipeline policies + * based on their declared requirements when they were added. + * + * Order is first determined by phase: + * + * 1. Serialize Phase + * 2. Policies not in a phase + * 3. Deserialize Phase + * 4. Retry Phase + * 5. 
Sign Phase + * + * Within each phase, policies are executed in the order + * they were added unless they were specified to execute + * before/after other policies or after a particular phase. + * + * To determine the final order, we will walk the policy list + * in phase order multiple times until all dependencies are + * satisfied. + * + * `afterPolicies` are the set of policies that must be + * executed before a given policy. This requirement is + * considered satisfied when each of the listed policies + * have been scheduled. + * + * `beforePolicies` are the set of policies that must be + * executed after a given policy. Since this dependency + * can be expressed by converting it into a equivalent + * `afterPolicies` declarations, they are normalized + * into that form for simplicity. + * + * An `afterPhase` dependency is considered satisfied when all + * policies in that phase have scheduled. + * + */ + const result = []; + // Track all policies we know about. + const policyMap = new Map(); + function createPhase(name) { + return { + name, + policies: new Set(), + hasRun: false, + hasAfterPolicies: false, + }; + } + // Track policies for each phase. + const serializePhase = createPhase("Serialize"); + const noPhase = createPhase("None"); + const deserializePhase = createPhase("Deserialize"); + const retryPhase = createPhase("Retry"); + const signPhase = createPhase("Sign"); + // a list of phases in order + const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; + // Small helper function to map phase name to each Phase + function getPhase(phase) { + if (phase === "Retry") { + return retryPhase; + } + else if (phase === "Serialize") { + return serializePhase; + } + else if (phase === "Deserialize") { + return deserializePhase; + } + else if (phase === "Sign") { + return signPhase; + } + else { + return noPhase; + } + } + // First walk each policy and create a node to track metadata. 
+ for (const descriptor of this._policies) { + const policy = descriptor.policy; + const options = descriptor.options; + const policyName = policy.name; + if (policyMap.has(policyName)) { + throw new Error("Duplicate policy names not allowed in pipeline"); + } + const node = { + policy, + dependsOn: new Set(), + dependants: new Set(), + }; + if (options.afterPhase) { + node.afterPhase = getPhase(options.afterPhase); + node.afterPhase.hasAfterPolicies = true; + } + policyMap.set(policyName, node); + const phase = getPhase(options.phase); + phase.policies.add(node); + } + // Now that each policy has a node, connect dependency references. + for (const descriptor of this._policies) { + const { policy, options } = descriptor; + const policyName = policy.name; + const node = policyMap.get(policyName); + if (!node) { + throw new Error(`Missing node for policy ${policyName}`); + } + if (options.afterPolicies) { + for (const afterPolicyName of options.afterPolicies) { + const afterNode = policyMap.get(afterPolicyName); + if (afterNode) { + // Linking in both directions helps later + // when we want to notify dependants. + node.dependsOn.add(afterNode); + afterNode.dependants.add(node); + } + } + } + if (options.beforePolicies) { + for (const beforePolicyName of options.beforePolicies) { + const beforeNode = policyMap.get(beforePolicyName); + if (beforeNode) { + // To execute before another node, make it + // depend on the current node. + beforeNode.dependsOn.add(node); + node.dependants.add(beforeNode); + } + } + } + } + function walkPhase(phase) { + phase.hasRun = true; + // Sets iterate in insertion order + for (const node of phase.policies) { + if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { + // If this node is waiting on a phase to complete, + // we need to skip it for now. + // Even if the phase is empty, we should wait for it + // to be walked to avoid re-ordering policies. 
+ continue; + } + if (node.dependsOn.size === 0) { + // If there's nothing else we're waiting for, we can + // add this policy to the result list. + result.push(node.policy); + // Notify anything that depends on this policy that + // the policy has been scheduled. + for (const dependant of node.dependants) { + dependant.dependsOn.delete(node); + } + policyMap.delete(node.policy.name); + phase.policies.delete(node); + } + } + } + function walkPhases() { + for (const phase of orderedPhases) { + walkPhase(phase); + // if the phase isn't complete + if (phase.policies.size > 0 && phase !== noPhase) { + if (!noPhase.hasRun) { + // Try running noPhase to see if that unblocks this phase next tick. + // This can happen if a phase that happens before noPhase + // is waiting on a noPhase policy to complete. + walkPhase(noPhase); + } + // Don't proceed to the next phase until this phase finishes. + return; + } + if (phase.hasAfterPolicies) { + // Run any policies unblocked by this phase + walkPhase(noPhase); + } + } + } + // Iterate until we've put every node in the result list. + let iteration = 0; + while (policyMap.size > 0) { + iteration++; + const initialResultLength = result.length; + // Keep walking each phase in order until we can order every node. + walkPhases(); + // The result list *should* get at least one larger each time + // after the first full pass. + // Otherwise, we're going to loop forever. + if (result.length <= initialResultLength && iteration > 1) { + throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); + } + } + return result; + } +} +/** + * Creates a totally empty pipeline. + * Useful for testing or creating a custom one. 
+ */ +export function createEmptyPipeline() { + return HttpPipeline.create(); +} +//# sourceMappingURL=pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/pipelineRequest.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/pipelineRequest.js new file mode 100644 index 000000000..866575912 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/pipelineRequest.js @@ -0,0 +1,36 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHttpHeaders } from "./httpHeaders.js"; +import { randomUUID } from "@azure/core-util"; +class PipelineRequestImpl { + constructor(options) { + var _a, _b, _c, _d, _e, _f, _g; + this.url = options.url; + this.body = options.body; + this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : createHttpHeaders(); + this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; + this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; + this.multipartBody = options.multipartBody; + this.formData = options.formData; + this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; + this.proxySettings = options.proxySettings; + this.streamResponseStatusCodes = options.streamResponseStatusCodes; + this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; + this.abortSignal = options.abortSignal; + this.tracingOptions = options.tracingOptions; + this.onUploadProgress = options.onUploadProgress; + this.onDownloadProgress = options.onDownloadProgress; + this.requestId = options.requestId || randomUUID(); + this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; + this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; + } +} +/** + * Creates a new pipeline request with the given options. 
+ * This method is to allow for the easy setting of default values and not required. + * @param options - The options to create the request with. + */ +export function createPipelineRequest(options) { + return new PipelineRequestImpl(options); +} +//# sourceMappingURL=pipelineRequest.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/auxiliaryAuthenticationHeaderPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/auxiliaryAuthenticationHeaderPolicy.js new file mode 100644 index 000000000..912154a6a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/auxiliaryAuthenticationHeaderPolicy.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTokenCycler } from "../util/tokenCycler.js"; +import { logger as coreLogger } from "../log.js"; +/** + * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. + */ +export const auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; +const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; +async function sendAuthorizeRequest(options) { + var _a, _b; + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; +} +/** + * A policy for external tokens to `x-ms-authorization-auxiliary` header. + * This header will be used when creating a cross-tenant application we may need to handle authentication requests + * for resources that are in different tenants. 
+ * You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works + */ +export function auxiliaryAuthenticationHeaderPolicy(options) { + const { credentials, scopes } = options; + const logger = options.logger || coreLogger; + const tokenCyclerMap = new WeakMap(); + return { + name: auxiliaryAuthenticationHeaderPolicyName, + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); + } + if (!credentials || credentials.length === 0) { + logger.info(`${auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); + return next(request); + } + const tokenPromises = []; + for (const credential of credentials) { + let getAccessToken = tokenCyclerMap.get(credential); + if (!getAccessToken) { + getAccessToken = createTokenCycler(credential); + tokenCyclerMap.set(credential, getAccessToken); + } + tokenPromises.push(sendAuthorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + })); + } + const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); + if (auxiliaryTokens.length === 0) { + logger.warning(`None of the auxiliary tokens are valid. 
${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); + return next(request); + } + request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); + return next(request); + }, + }; +} +//# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/bearerTokenAuthenticationPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/bearerTokenAuthenticationPolicy.js new file mode 100644 index 000000000..4ef00eb8d --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/bearerTokenAuthenticationPolicy.js @@ -0,0 +1,108 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTokenCycler } from "../util/tokenCycler.js"; +import { logger as coreLogger } from "../log.js"; +/** + * The programmatic identifier of the bearerTokenAuthenticationPolicy. + */ +export const bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; +/** + * Default authorize request handler + */ +async function defaultAuthorizeRequest(options) { + const { scopes, getAccessToken, request } = options; + const getTokenOptions = { + abortSignal: request.abortSignal, + tracingOptions: request.tracingOptions, + }; + const accessToken = await getAccessToken(scopes, getTokenOptions); + if (accessToken) { + options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); + } +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. 
+ */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * A policy that can request a token from a TokenCredential implementation and + * then apply it to the Authorization header of a request as a Bearer token. + */ +export function bearerTokenAuthenticationPolicy(options) { + var _a; + const { credential, scopes, challengeCallbacks } = options; + const logger = options.logger || coreLogger; + const callbacks = Object.assign({ authorizeRequest: (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) !== null && _a !== void 0 ? _a : defaultAuthorizeRequest, authorizeRequestOnChallenge: challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge }, challengeCallbacks); + // This function encapsulates the entire process of reliably retrieving the token + // The options are left out of the public API until there's demand to configure this. + // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` + // in order to pass through the `options` object. + const getAccessToken = credential + ? createTokenCycler(credential /* , options */) + : () => Promise.resolve(null); + return { + name: bearerTokenAuthenticationPolicyName, + /** + * If there's no challenge parameter: + * - It will try to retrieve the token using the cache, or the credential's getToken. + * - Then it will try the next policy with or without the retrieved token. + * + * It uses the challenge parameters to: + * - Skip a first attempt to get the token from the credential if there's no cached token, + * since it expects the token to be retrievable only after the challenge. + * - Prepare the outgoing request if the `prepareRequest` method has been provided. + * - Send an initial request to receive the challenge if it fails. 
+ * - Process a challenge if the response contains it. + * - Retrieve a token with the challenge information, then re-send the request. + */ + async sendRequest(request, next) { + if (!request.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + await callbacks.authorizeRequest({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + getAccessToken, + logger, + }); + let response; + let error; + try { + response = await next(request); + } + catch (err) { + error = err; + response = err.response; + } + if (callbacks.authorizeRequestOnChallenge && + (response === null || response === void 0 ? void 0 : response.status) === 401 && + getChallenge(response)) { + // processes challenge + const shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ + scopes: Array.isArray(scopes) ? scopes : [scopes], + request, + response, + getAccessToken, + logger, + }); + if (shouldSendRequest) { + return next(request); + } + } + if (error) { + throw error; + } + else { + return response; + } + }, + }; +} +//# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/decompressResponsePolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/decompressResponsePolicy.js new file mode 100644 index 000000000..40e4ac757 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/decompressResponsePolicy.js @@ -0,0 +1,23 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the decompressResponsePolicy. 
+ */ +export const decompressResponsePolicyName = "decompressResponsePolicy"; +/** + * A policy to enable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +export function decompressResponsePolicy() { + return { + name: decompressResponsePolicyName, + async sendRequest(request, next) { + // HEAD requests have no body + if (request.method !== "HEAD") { + request.headers.set("Accept-Encoding", "gzip,deflate"); + } + return next(request); + }, + }; +} +//# sourceMappingURL=decompressResponsePolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/defaultRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/defaultRetryPolicy.js new file mode 100644 index 000000000..bda83eb35 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/defaultRetryPolicy.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { throttlingRetryStrategy } from "../retryStrategies/throttlingRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link defaultRetryPolicy} + */ +export const defaultRetryPolicyName = "defaultRetryPolicy"; +/** + * A policy that retries according to three strategies: + * - When the server sends a 429 response with a Retry-After header. + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. 
+ */ +export function defaultRetryPolicy(options = {}) { + var _a; + return { + name: defaultRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy(), exponentialRetryStrategy(options)], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=defaultRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/exponentialRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/exponentialRetryPolicy.js new file mode 100644 index 000000000..12193c26b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/exponentialRetryPolicy.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * The programmatic identifier of the exponentialRetryPolicy. + */ +export const exponentialRetryPolicyName = "exponentialRetryPolicy"; +/** + * A policy that attempts to retry requests while introducing an exponentially increasing delay. + * @param options - Options that configure retry logic. + */ +export function exponentialRetryPolicy(options = {}) { + var _a; + return retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? 
_a : DEFAULT_RETRY_POLICY_COUNT, + }); +} +//# sourceMappingURL=exponentialRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/formDataPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/formDataPolicy.js new file mode 100644 index 000000000..e4816a489 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/formDataPolicy.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNodeLike, stringToUint8Array } from "@azure/core-util"; +import { createHttpHeaders } from "../httpHeaders.js"; +/** + * The programmatic identifier of the formDataPolicy. + */ +export const formDataPolicyName = "formDataPolicy"; +function formDataToFormDataMap(formData) { + var _a; + const formDataMap = {}; + for (const [key, value] of formData.entries()) { + (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); + formDataMap[key].push(value); + } + return formDataMap; +} +/** + * A policy that encodes FormData on the request into the body. 
+ */ +export function formDataPolicy() { + return { + name: formDataPolicyName, + async sendRequest(request, next) { + if (isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { + request.formData = formDataToFormDataMap(request.body); + request.body = undefined; + } + if (request.formData) { + const contentType = request.headers.get("Content-Type"); + if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { + request.body = wwwFormUrlEncode(request.formData); + } + else { + await prepareFormData(request.formData, request); + } + request.formData = undefined; + } + return next(request); + }, + }; +} +function wwwFormUrlEncode(formData) { + const urlSearchParams = new URLSearchParams(); + for (const [key, value] of Object.entries(formData)) { + if (Array.isArray(value)) { + for (const subValue of value) { + urlSearchParams.append(key, subValue.toString()); + } + } + else { + urlSearchParams.append(key, value.toString()); + } + } + return urlSearchParams.toString(); +} +async function prepareFormData(formData, request) { + // validate content type (multipart/form-data) + const contentType = request.headers.get("Content-Type"); + if (contentType && !contentType.startsWith("multipart/form-data")) { + // content type is specified and is not multipart/form-data. Exit. + return; + } + request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data"); + // set body to MultipartRequestBody using content from FormDataMap + const parts = []; + for (const [fieldName, values] of Object.entries(formData)) { + for (const value of Array.isArray(values) ? 
values : [values]) { + if (typeof value === "string") { + parts.push({ + headers: createHttpHeaders({ + "Content-Disposition": `form-data; name="${fieldName}"`, + }), + body: stringToUint8Array(value, "utf-8"), + }); + } + else if (value === undefined || value === null || typeof value !== "object") { + throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); + } + else { + // using || instead of ?? here since if value.name is empty we should create a file name + const fileName = value.name || "blob"; + const headers = createHttpHeaders(); + headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); + // again, || is used since an empty value.type means the content type is unset + headers.set("Content-Type", value.type || "application/octet-stream"); + parts.push({ + headers, + body: value, + }); + } + } + } + request.multipartBody = { parts }; +} +//# sourceMappingURL=formDataPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/logPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/logPolicy.js new file mode 100644 index 000000000..e981567c6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/logPolicy.js @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { logger as coreLogger } from "../log.js"; +import { Sanitizer } from "../util/sanitizer.js"; +/** + * The programmatic identifier of the logPolicy. + */ +export const logPolicyName = "logPolicy"; +/** + * A policy that logs all requests and responses. + * @param options - Options to configure logPolicy. + */ +export function logPolicy(options = {}) { + var _a; + const logger = (_a = options.logger) !== null && _a !== void 0 ? 
_a : coreLogger.info; + const sanitizer = new Sanitizer({ + additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, + additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, + }); + return { + name: logPolicyName, + async sendRequest(request, next) { + if (!logger.enabled) { + return next(request); + } + logger(`Request: ${sanitizer.sanitize(request)}`); + const response = await next(request); + logger(`Response status code: ${response.status}`); + logger(`Headers: ${sanitizer.sanitize(response.headers)}`); + return response; + }, + }; +} +//# sourceMappingURL=logPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/multipartPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/multipartPolicy.js new file mode 100644 index 000000000..efa3fdcf9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/multipartPolicy.js @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { randomUUID, stringToUint8Array } from "@azure/core-util"; +import { concat } from "../util/concat.js"; +import { isBlob } from "../util/typeGuards.js"; +function generateBoundary() { + return `----AzSDKFormBoundary${randomUUID()}`; +} +function encodeHeaders(headers) { + let result = ""; + for (const [key, value] of headers) { + result += `${key}: ${value}\r\n`; + } + return result; +} +function getLength(source) { + if (source instanceof Uint8Array) { + return source.byteLength; + } + else if (isBlob(source)) { + // if was created using createFile then -1 means we have an unknown size + return source.size === -1 ? 
undefined : source.size; + } + else { + return undefined; + } +} +function getTotalLength(sources) { + let total = 0; + for (const source of sources) { + const partLength = getLength(source); + if (partLength === undefined) { + return undefined; + } + else { + total += partLength; + } + } + return total; +} +async function buildRequestBody(request, parts, boundary) { + const sources = [ + stringToUint8Array(`--${boundary}`, "utf-8"), + ...parts.flatMap((part) => [ + stringToUint8Array("\r\n", "utf-8"), + stringToUint8Array(encodeHeaders(part.headers), "utf-8"), + stringToUint8Array("\r\n", "utf-8"), + part.body, + stringToUint8Array(`\r\n--${boundary}`, "utf-8"), + ]), + stringToUint8Array("--\r\n\r\n", "utf-8"), + ]; + const contentLength = getTotalLength(sources); + if (contentLength) { + request.headers.set("Content-Length", contentLength); + } + request.body = await concat(sources); +} +/** + * Name of multipart policy + */ +export const multipartPolicyName = "multipartPolicy"; +const maxBoundaryLength = 70; +const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); +function assertValidBoundary(boundary) { + if (boundary.length > maxBoundaryLength) { + throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); + } + if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { + throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); + } +} +/** + * Pipeline policy for multipart requests + */ +export function multipartPolicy() { + return { + name: multipartPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.multipartBody) { + return next(request); + } + if (request.body) { + throw new Error("multipartBody and regular body cannot be set at the same time"); + } + let boundary = request.multipartBody.boundary; + const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? 
_a : "multipart/mixed"; + const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); + if (!parsedHeader) { + throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); + } + const [, contentType, parsedBoundary] = parsedHeader; + if (parsedBoundary && boundary && parsedBoundary !== boundary) { + throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); + } + boundary !== null && boundary !== void 0 ? boundary : (boundary = parsedBoundary); + if (boundary) { + assertValidBoundary(boundary); + } + else { + boundary = generateBoundary(); + } + request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); + await buildRequestBody(request, request.multipartBody.parts, boundary); + request.multipartBody = undefined; + return next(request); + }, + }; +} +//# sourceMappingURL=multipartPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/ndJsonPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/ndJsonPolicy.js new file mode 100644 index 000000000..3b1fa6fb9 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/ndJsonPolicy.js @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the ndJsonPolicy. + */ +export const ndJsonPolicyName = "ndJsonPolicy"; +/** + * ndJsonPolicy is a policy used to control keep alive settings for every request. 
+ */ +export function ndJsonPolicy() { + return { + name: ndJsonPolicyName, + async sendRequest(request, next) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } + return next(request); + }, + }; +} +//# sourceMappingURL=ndJsonPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/proxyPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/proxyPolicy.js new file mode 100644 index 000000000..1ec048c12 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/proxyPolicy.js @@ -0,0 +1,191 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { HttpsProxyAgent } from "https-proxy-agent"; +import { HttpProxyAgent } from "http-proxy-agent"; +import { logger } from "../log.js"; +const HTTPS_PROXY = "HTTPS_PROXY"; +const HTTP_PROXY = "HTTP_PROXY"; +const ALL_PROXY = "ALL_PROXY"; +const NO_PROXY = "NO_PROXY"; +/** + * The programmatic identifier of the proxyPolicy. + */ +export const proxyPolicyName = "proxyPolicy"; +/** + * Stores the patterns specified in NO_PROXY environment variable. + * @internal + */ +export const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. 
*/ +const globalBypassedMap = new Map(); +function getEnvironmentValue(name) { + if (process.env[name]) { + return process.env[name]; + } + else if (process.env[name.toLowerCase()]) { + return process.env[name.toLowerCase()]; + } + return undefined; +} +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(HTTPS_PROXY); + const allProxy = getEnvironmentValue(ALL_PROXY); + const httpProxy = getEnvironmentValue(HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = new URL(uri).hostname; + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } + } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +export function loadNoProxy() { + const noProxy = getEnvironmentValue(NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. + * If no argument is given, it attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + * @param proxyUrl - The url of the proxy to use. May contain authentication information. + * @deprecated - Internally this method is no longer necessary when setting proxy information. + */ +export function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const parsedUrl = new URL(proxyUrl); + const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; + return { + host: schema + parsedUrl.hostname, + port: Number.parseInt(parsedUrl.port || "80"), + username: parsedUrl.username, + password: parsedUrl.password, + }; +} +/** + * This method attempts to parse a proxy URL from the environment + * variables `HTTPS_PROXY` or `HTTP_PROXY`. + */ +function getDefaultProxySettingsInternal() { + const envProxy = loadEnvironmentProxyValue(); + return envProxy ? 
new URL(envProxy) : undefined; +} +function getUrlFromProxySettings(settings) { + let parsedProxyUrl; + try { + parsedProxyUrl = new URL(settings.host); + } + catch (_error) { + throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); + } + parsedProxyUrl.port = String(settings.port); + if (settings.username) { + parsedProxyUrl.username = settings.username; + } + if (settings.password) { + parsedProxyUrl.password = settings.password; + } + return parsedProxyUrl; +} +function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { + // Custom Agent should take precedence so if one is present + // we should skip to avoid overwriting it. + if (request.agent) { + return; + } + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (request.tlsSettings) { + logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); + } + const headers = request.headers.toJSON(); + if (isInsecure) { + if (!cachedAgents.httpProxyAgent) { + cachedAgents.httpProxyAgent = new HttpProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpProxyAgent; + } + else { + if (!cachedAgents.httpsProxyAgent) { + cachedAgents.httpsProxyAgent = new HttpsProxyAgent(proxyUrl, { headers }); + } + request.agent = cachedAgents.httpsProxyAgent; + } +} +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. + * @param options - additional settings, for example, custom NO_PROXY patterns + */ +export function proxyPolicy(proxySettings, options) { + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + const defaultProxy = proxySettings + ? 
getUrlFromProxySettings(proxySettings) + : getDefaultProxySettingsInternal(); + const cachedAgents = {}; + return { + name: proxyPolicyName, + async sendRequest(request, next) { + var _a; + if (!request.proxySettings && + defaultProxy && + !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? undefined : globalBypassedMap)) { + setProxyAgentOnRequest(request, cachedAgents, defaultProxy); + } + else if (request.proxySettings) { + setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); + } + return next(request); + }, + }; +} +//# sourceMappingURL=proxyPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/redirectPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/redirectPolicy.js new file mode 100644 index 000000000..302a1b8ef --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/redirectPolicy.js @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the redirectPolicy. + */ +export const redirectPolicyName = "redirectPolicy"; +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +/** + * A policy to follow Location headers from the server in order + * to support server-side redirection. + * In the browser, this policy is not used. + * @param options - Options to control policy behavior. 
+ */ +export function redirectPolicy(options = {}) { + const { maxRetries = 20 } = options; + return { + name: redirectPolicyName, + async sendRequest(request, next) { + const response = await next(request); + return handleRedirect(next, response, maxRetries); + }, + }; +} +async function handleRedirect(next, response, maxRetries, currentRetries = 0) { + const { request, status, headers } = response; + const locationHeader = headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + currentRetries < maxRetries) { + const url = new URL(locationHeader, request.url); + request.url = url.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + request.headers.delete("Content-Length"); + delete request.body; + } + request.headers.delete("Authorization"); + const res = await next(request); + return handleRedirect(next, res, maxRetries, currentRetries + 1); + } + return response; +} +//# sourceMappingURL=redirectPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/retryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/retryPolicy.js new file mode 100644 index 000000000..f58396f45 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/retryPolicy.js @@ -0,0 +1,106 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { delay } from "../util/helpers.js"; +import { createClientLogger } from "@azure/logger"; +import { AbortError } from "@azure/abort-controller"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +const retryPolicyLogger = createClientLogger("core-rest-pipeline retryPolicy"); +/** + * The programmatic identifier of the retryPolicy. + */ +const retryPolicyName = "retryPolicy"; +/** + * retryPolicy is a generic policy to enable retrying requests when certain conditions are met + */ +export function retryPolicy(strategies, options = { maxRetries: DEFAULT_RETRY_POLICY_COUNT }) { + const logger = options.logger || retryPolicyLogger; + return { + name: retryPolicyName, + async sendRequest(request, next) { + var _a, _b; + let response; + let responseError; + let retryCount = -1; + // eslint-disable-next-line no-constant-condition + retryRequest: while (true) { + retryCount += 1; + response = undefined; + responseError = undefined; + try { + logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); + response = await next(request); + logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); + } + catch (e) { + logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); + // RestErrors are valid targets for the retry strategies. + // If none of the retry strategies can work with them, they will be thrown later in this policy. + // If the received error is not a RestError, it is immediately thrown. + responseError = e; + if (!e || responseError.name !== "RestError") { + throw e; + } + response = responseError.response; + } + if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { + logger.error(`Retry ${retryCount}: Request aborted.`); + const abortError = new AbortError(); + throw abortError; + } + if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? 
_b : DEFAULT_RETRY_POLICY_COUNT)) { + logger.info(`Retry ${retryCount}: Maximum retries reached. Returning the last received response, or throwing the last received error.`); + if (responseError) { + throw responseError; + } + else if (response) { + return response; + } + else { + throw new Error("Maximum retries reached with no response or error to throw"); + } + } + logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); + strategiesLoop: for (const strategy of strategies) { + const strategyLogger = strategy.logger || retryPolicyLogger; + strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); + const modifiers = strategy.retry({ + retryCount, + response, + responseError, + }); + if (modifiers.skipStrategy) { + strategyLogger.info(`Retry ${retryCount}: Skipped.`); + continue strategiesLoop; + } + const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; + if (errorToThrow) { + strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); + throw errorToThrow; + } + if (retryAfterInMs || retryAfterInMs === 0) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); + await delay(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); + continue retryRequest; + } + if (redirectTo) { + strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); + request.url = redirectTo; + continue retryRequest; + } + } + if (responseError) { + logger.info(`None of the retry strategies could work with the received error. Throwing it.`); + throw responseError; + } + if (response) { + logger.info(`None of the retry strategies could work with the received response. Returning it.`); + return response; + } + // If all the retries skip and there's no response, + // we're still in the retry loop, so a new request will be sent + // until `maxRetries` is reached. 
+ } + }, + }; +} +//# sourceMappingURL=retryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/setClientRequestIdPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/setClientRequestIdPolicy.js new file mode 100644 index 000000000..46baba4cd --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/setClientRequestIdPolicy.js @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the setClientRequestIdPolicy. + */ +export const setClientRequestIdPolicyName = "setClientRequestIdPolicy"; +/** + * Each PipelineRequest gets a unique id upon creation. + * This policy passes that unique id along via an HTTP header to enable better + * telemetry and tracing. + * @param requestIdHeaderName - The name of the header to pass the request ID to. + */ +export function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { + return { + name: setClientRequestIdPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(requestIdHeaderName)) { + request.headers.set(requestIdHeaderName, request.requestId); + } + return next(request); + }, + }; +} +//# sourceMappingURL=setClientRequestIdPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/systemErrorRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/systemErrorRetryPolicy.js new file mode 100644 index 000000000..f6b608a75 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/systemErrorRetryPolicy.js @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { exponentialRetryStrategy } from "../retryStrategies/exponentialRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link systemErrorRetryPolicy} + */ +export const systemErrorRetryPolicyName = "systemErrorRetryPolicy"; +/** + * A retry policy that specifically seeks to handle errors in the + * underlying transport layer (e.g. DNS lookup failures) rather than + * retryable error codes from the server itself. + * @param options - Options that customize the policy. + */ +export function systemErrorRetryPolicy(options = {}) { + var _a; + return { + name: systemErrorRetryPolicyName, + sendRequest: retryPolicy([ + exponentialRetryStrategy(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), + ], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=systemErrorRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/throttlingRetryPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/throttlingRetryPolicy.js new file mode 100644 index 000000000..9349e6e07 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/throttlingRetryPolicy.js @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { throttlingRetryStrategy } from "../retryStrategies/throttlingRetryStrategy.js"; +import { retryPolicy } from "./retryPolicy.js"; +import { DEFAULT_RETRY_POLICY_COUNT } from "../constants.js"; +/** + * Name of the {@link throttlingRetryPolicy} + */ +export const throttlingRetryPolicyName = "throttlingRetryPolicy"; +/** + * A policy that retries when the server sends a 429 response with a Retry-After header. 
+ * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * + * @param options - Options that configure retry logic. + */ +export function throttlingRetryPolicy(options = {}) { + var _a; + return { + name: throttlingRetryPolicyName, + sendRequest: retryPolicy([throttlingRetryStrategy()], { + maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_POLICY_COUNT, + }).sendRequest, + }; +} +//# sourceMappingURL=throttlingRetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/tlsPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/tlsPolicy.js new file mode 100644 index 000000000..67ce535c6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/tlsPolicy.js @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Name of the TLS Policy + */ +export const tlsPolicyName = "tlsPolicy"; +/** + * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
+ */ +export function tlsPolicy(tlsSettings) { + return { + name: tlsPolicyName, + sendRequest: async (req, next) => { + // Users may define a request tlsSettings, honor those over the client level one + if (!req.tlsSettings) { + req.tlsSettings = tlsSettings; + } + return next(req); + }, + }; +} +//# sourceMappingURL=tlsPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/tracingPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/tracingPolicy.js new file mode 100644 index 000000000..d077e7733 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/tracingPolicy.js @@ -0,0 +1,120 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createTracingClient, } from "@azure/core-tracing"; +import { SDK_VERSION } from "../constants.js"; +import { getUserAgentValue } from "../util/userAgent.js"; +import { logger } from "../log.js"; +import { getErrorMessage, isError } from "@azure/core-util"; +import { isRestError } from "../restError.js"; +/** + * The programmatic identifier of the tracingPolicy. + */ +export const tracingPolicyName = "tracingPolicy"; +/** + * A simple policy to create OpenTelemetry Spans for each request made by the pipeline + * that has SpanOptions with a parent. + * Requests made without a parent Span will not be recorded. + * @param options - Options to configure the telemetry logged by the tracing policy. + */ +export function tracingPolicy(options = {}) { + const userAgent = getUserAgentValue(options.userAgentPrefix); + const tracingClient = tryCreateTracingClient(); + return { + name: tracingPolicyName, + async sendRequest(request, next) { + var _a, _b; + if (!tracingClient || !((_a = request.tracingOptions) === null || _a === void 0 ? 
void 0 : _a.tracingContext)) { + return next(request); + } + const { span, tracingContext } = (_b = tryCreateSpan(tracingClient, request, userAgent)) !== null && _b !== void 0 ? _b : {}; + if (!span || !tracingContext) { + return next(request); + } + try { + const response = await tracingClient.withContext(tracingContext, next, request); + tryProcessResponse(span, response); + return response; + } + catch (err) { + tryProcessError(span, err); + throw err; + } + }, + }; +} +function tryCreateTracingClient() { + try { + return createTracingClient({ + namespace: "", + packageName: "@azure/core-rest-pipeline", + packageVersion: SDK_VERSION, + }); + } + catch (e) { + logger.warning(`Error when creating the TracingClient: ${getErrorMessage(e)}`); + return undefined; + } +} +function tryCreateSpan(tracingClient, request, userAgent) { + try { + // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. + const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { + spanKind: "client", + spanAttributes: { + "http.method": request.method, + "http.url": request.url, + requestId: request.requestId, + }, + }); + // If the span is not recording, don't do any more work. + if (!span.isRecording()) { + span.end(); + return undefined; + } + if (userAgent) { + span.setAttribute("http.user_agent", userAgent); + } + // set headers + const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); + for (const [key, value] of Object.entries(headers)) { + request.headers.set(key, value); + } + return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; + } + catch (e) { + logger.warning(`Skipping creating a tracing span due to an error: ${getErrorMessage(e)}`); + return undefined; + } +} +function tryProcessError(span, error) { + try { + span.setStatus({ + status: "error", + error: isError(error) ? 
error : undefined, + }); + if (isRestError(error) && error.statusCode) { + span.setAttribute("http.status_code", error.statusCode); + } + span.end(); + } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${getErrorMessage(e)}`); + } +} +function tryProcessResponse(span, response) { + try { + span.setAttribute("http.status_code", response.status); + const serviceRequestId = response.headers.get("x-ms-request-id"); + if (serviceRequestId) { + span.setAttribute("serviceRequestId", serviceRequestId); + } + span.setStatus({ + status: "success", + }); + span.end(); + } + catch (e) { + logger.warning(`Skipping tracing span processing due to an error: ${getErrorMessage(e)}`); + } +} +//# sourceMappingURL=tracingPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/userAgentPolicy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/userAgentPolicy.js new file mode 100644 index 000000000..10ae6852c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/policies/userAgentPolicy.js @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getUserAgentHeaderName, getUserAgentValue } from "../util/userAgent.js"; +const UserAgentHeaderName = getUserAgentHeaderName(); +/** + * The programmatic identifier of the userAgentPolicy. + */ +export const userAgentPolicyName = "userAgentPolicy"; +/** + * A policy that sets the User-Agent header (or equivalent) to reflect + * the library version. + * @param options - Options to customize the user agent value. 
+ */ +export function userAgentPolicy(options = {}) { + const userAgentValue = getUserAgentValue(options.userAgentPrefix); + return { + name: userAgentPolicyName, + async sendRequest(request, next) { + if (!request.headers.has(UserAgentHeaderName)) { + request.headers.set(UserAgentHeaderName, userAgentValue); + } + return next(request); + }, + }; +} +//# sourceMappingURL=userAgentPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/restError.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/restError.js new file mode 100644 index 000000000..9008050e6 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/restError.js @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isError } from "@azure/core-util"; +import { custom } from "./util/inspect.js"; +import { Sanitizer } from "./util/sanitizer.js"; +const errorSanitizer = new Sanitizer(); +/** + * A custom error type for failed pipeline requests. + */ +export class RestError extends Error { + constructor(message, options = {}) { + super(message); + this.name = "RestError"; + this.code = options.code; + this.statusCode = options.statusCode; + this.request = options.request; + this.response = options.response; + Object.setPrototypeOf(this, RestError.prototype); + } + /** + * Logging method for util.inspect in Node + */ + [custom]() { + return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; + } +} +/** + * Something went wrong when making the request. + * This means the actual request failed for some reason, + * such as a DNS issue or the connection being lost. + */ +RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * This means that parsing the response from the server failed. + * It may have been malformed. + */ +RestError.PARSE_ERROR = "PARSE_ERROR"; +/** + * Typeguard for RestError + * @param e - Something caught by a catch clause. 
+ */ +export function isRestError(e) { + if (e instanceof RestError) { + return true; + } + return isError(e) && e.name === "RestError"; +} +//# sourceMappingURL=restError.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/exponentialRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/exponentialRetryStrategy.js new file mode 100644 index 000000000..c1e5c0d4e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/exponentialRetryStrategy.js @@ -0,0 +1,70 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getRandomIntegerInclusive } from "@azure/core-util"; +import { isThrottlingRetryResponse } from "./throttlingRetryStrategy.js"; +// intervals are in milliseconds +const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; +const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; +/** + * A retry strategy that retries with an exponentially increasing delay in these two cases: + * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). + * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). + */ +export function exponentialRetryStrategy(options = {}) { + var _a, _b; + const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; + const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? 
_b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; + let retryAfterInMs = retryInterval; + return { + name: "exponentialRetryStrategy", + retry({ retryCount, response, responseError }) { + const matchedSystemError = isSystemError(responseError); + const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; + const isExponential = isExponentialRetryResponse(response); + const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; + const unknownResponse = response && (isThrottlingRetryResponse(response) || !isExponential); + if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { + return { skipStrategy: true }; + } + if (responseError && !matchedSystemError && !isExponential) { + return { errorToThrow: responseError }; + } + // Exponentially increase the delay each time + const exponentialDelay = retryAfterInMs * Math.pow(2, retryCount); + // Don't let the delay exceed the maximum + const clampedExponentialDelay = Math.min(maxRetryInterval, exponentialDelay); + // Allow the final value to have some "jitter" (within 50% of the delay size) so + // that retries across multiple clients don't occur simultaneously. + retryAfterInMs = + clampedExponentialDelay / 2 + getRandomIntegerInclusive(0, clampedExponentialDelay / 2); + return { retryAfterInMs }; + }, + }; +} +/** + * A response is a retry response if it has status codes: + * - 408, or + * - Greater or equal than 500, except for 501 and 505. + */ +export function isExponentialRetryResponse(response) { + return Boolean(response && + response.status !== undefined && + (response.status >= 500 || response.status === 408) && + response.status !== 501 && + response.status !== 505); +} +/** + * Determines whether an error from a pipeline response was triggered in the network layer. 
+ */ +export function isSystemError(err) { + if (!err) { + return false; + } + return (err.code === "ETIMEDOUT" || + err.code === "ESOCKETTIMEDOUT" || + err.code === "ECONNREFUSED" || + err.code === "ECONNRESET" || + err.code === "ENOENT" || + err.code === "ENOTFOUND"); +} +//# sourceMappingURL=exponentialRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/retryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/retryStrategy.js new file mode 100644 index 000000000..4b2354b00 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/retryStrategy.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export {}; +//# sourceMappingURL=retryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/throttlingRetryStrategy.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/throttlingRetryStrategy.js new file mode 100644 index 000000000..5990ccc90 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/retryStrategies/throttlingRetryStrategy.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { parseHeaderValueAsNumber } from "../util/helpers.js"; +/** + * The header that comes back from Azure services representing + * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). + */ +const RetryAfterHeader = "Retry-After"; +/** + * The headers that come back from Azure services representing + * the amount of time (minimum) to wait to retry. 
+ * + * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds + * "Retry-After" : seconds or timestamp + */ +const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; +/** + * A response is a throttling retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. + * + * Returns the `retryAfterInMs` value if the response is a throttling retry response. + * If not throttling retry response, returns `undefined`. + * + * @internal + */ +function getRetryAfterInMs(response) { + if (!(response && [429, 503].includes(response.status))) + return undefined; + try { + // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" + for (const header of AllRetryAfterHeaders) { + const retryAfterValue = parseHeaderValueAsNumber(response, header); + if (retryAfterValue === 0 || retryAfterValue) { + // "Retry-After" header ==> seconds + // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds + const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; + return retryAfterValue * multiplyingFactor; // in milli-seconds + } + } + // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds + const retryAfterHeader = response.headers.get(RetryAfterHeader); + if (!retryAfterHeader) + return; + const date = Date.parse(retryAfterHeader); + const diff = date - Date.now(); + // negative diff would mean a date in the past, so retry asap with 0 milliseconds + return Number.isFinite(diff) ? Math.max(0, diff) : undefined; + } + catch (e) { + return undefined; + } +} +/** + * A response is a retry response if it has a throttling status code (429 or 503), + * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. 
+ */ +export function isThrottlingRetryResponse(response) { + return Number.isFinite(getRetryAfterInMs(response)); +} +export function throttlingRetryStrategy() { + return { + name: "throttlingRetryStrategy", + retry({ response }) { + const retryAfterInMs = getRetryAfterInMs(response); + if (!Number.isFinite(retryAfterInMs)) { + return { skipStrategy: true }; + } + return { + retryAfterInMs, + }; + }, + }; +} +//# sourceMappingURL=throttlingRetryStrategy.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/concat.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/concat.js new file mode 100644 index 000000000..3e820e883 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/concat.js @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { __asyncGenerator, __asyncValues, __await } from "tslib"; +import { Readable } from "node:stream"; +import { isBlob } from "./typeGuards.js"; +import { getRawContent } from "./file.js"; +function streamAsyncIterator() { + return __asyncGenerator(this, arguments, function* streamAsyncIterator_1() { + const reader = this.getReader(); + try { + while (true) { + const { done, value } = yield __await(reader.read()); + if (done) { + return yield __await(void 0); + } + yield yield __await(value); + } + } + finally { + reader.releaseLock(); + } + }); +} +function makeAsyncIterable(webStream) { + if (!webStream[Symbol.asyncIterator]) { + webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); + } + if (!webStream.values) { + webStream.values = streamAsyncIterator.bind(webStream); + } +} +function ensureNodeStream(stream) { + if (stream instanceof ReadableStream) { + makeAsyncIterable(stream); + return Readable.fromWeb(stream); + } + else { + return stream; + } +} +function toStream(source) { + if (source instanceof Uint8Array) { + return Readable.from(Buffer.from(source)); + } + 
else if (isBlob(source)) { + return toStream(getRawContent(source)); + } + else { + return ensureNodeStream(source); + } +} +/** + * Utility function that concatenates a set of binary inputs into one combined output. + * + * @param sources - array of sources for the concatenation + * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. + * In browser, returns a `Blob` representing all the concatenated inputs. + * + * @internal + */ +export async function concat(sources) { + return function () { + const streams = sources.map((x) => (typeof x === "function" ? x() : x)).map(toStream); + return Readable.from((function () { + return __asyncGenerator(this, arguments, function* () { + var _a, e_1, _b, _c; + for (const stream of streams) { + try { + for (var _d = true, stream_1 = (e_1 = void 0, __asyncValues(stream)), stream_1_1; stream_1_1 = yield __await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { + _c = stream_1_1.value; + _d = false; + const chunk = _c; + yield yield __await(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (!_d && !_a && (_b = stream_1.return)) yield __await(_b.call(stream_1)); + } + finally { if (e_1) throw e_1.error; } + } + } + }); + })()); + }; +} +//# sourceMappingURL=concat.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/file.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/file.js new file mode 100644 index 000000000..0f271810e --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/file.js @@ -0,0 +1,96 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isNodeLike } from "@azure/core-util"; +import { isNodeReadableStream } from "./typeGuards.js"; +const unimplementedMethods = { + arrayBuffer: () => { + throw new Error("Not implemented"); + }, + slice: () => { + throw new Error("Not implemented"); + }, + text: () => { + throw new Error("Not implemented"); + }, +}; +/** + * Private symbol used as key on objects created using createFile containing the + * original source of the file object. + * + * This is used in Node to access the original Node stream without using Blob#stream, which + * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and + * Readable#to/fromWeb in Node versions we support: + * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) + * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) + * + * Once these versions are no longer supported, we may be able to stop doing this. + * + * @internal + */ +const rawContent = Symbol("rawContent"); +function hasRawContent(x) { + return typeof x[rawContent] === "function"; +} +/** + * Extract the raw content from a given blob-like object. If the input was created using createFile + * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. + * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. + * + * @internal + */ +export function getRawContent(blob) { + if (hasRawContent(blob)) { + return blob[rawContent](); + } + else { + return blob.stream(); + } +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function to: + * - Create a File object for use in RequestBodyType.formData in environments where the + * global File object is unavailable. + * - Create a File-like object from a readable stream without reading the stream into memory. 
+ * + * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is + * passed in a request's form data map, the stream will not be read into memory + * and instead will be streamed when the request is made. In the event of a retry, the + * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +export function createFileFromStream(stream, name, options = {}) { + var _a, _b, _c, _d; + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { + const s = stream(); + if (isNodeReadableStream(s)) { + throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); + } + return s; + }, [rawContent]: stream }); +} +/** + * Create an object that implements the File interface. This object is intended to be + * passed into RequestBodyType.formData, and is not guaranteed to work as expected in + * other situations. + * + * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. + * + * @param content - the content of the file as a Uint8Array in memory. + * @param name - the name of the file. + * @param options - optional metadata about the file, e.g. file name, file size, MIME type. + */ +export function createFile(content, name, options = {}) { + var _a, _b, _c; + if (isNodeLike) { + return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? 
_a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); + } + else { + return new File([content], name, options); + } +} +//# sourceMappingURL=file.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/helpers.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/helpers.js new file mode 100644 index 000000000..a1c1183ce --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/helpers.js @@ -0,0 +1,58 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +const StandardAbortMessage = "The operation was aborted."; +/** + * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. + * @param delayInMs - The number of milliseconds to be delayed. + * @param value - The value to be resolved with after a timeout of t milliseconds. + * @param options - The options for delay - currently abort options + * - abortSignal - The abortSignal associated with containing operation. + * - abortErrorMsg - The abort error message associated with containing operation. + * @returns Resolved promise + */ +export function delay(delayInMs, value, options) { + return new Promise((resolve, reject) => { + let timer = undefined; + let onAborted = undefined; + const rejectOnAbort = () => { + return reject(new AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + }; + const removeListeners = () => { + if ((options === null || options === void 0 ? 
void 0 : options.abortSignal) && onAborted) { + options.abortSignal.removeEventListener("abort", onAborted); + } + }; + onAborted = () => { + if (timer) { + clearTimeout(timer); + } + removeListeners(); + return rejectOnAbort(); + }; + if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { + return rejectOnAbort(); + } + timer = setTimeout(() => { + removeListeners(); + resolve(value); + }, delayInMs); + if (options === null || options === void 0 ? void 0 : options.abortSignal) { + options.abortSignal.addEventListener("abort", onAborted); + } + }); +} +/** + * @internal + * @returns the parsed value or undefined if the parsed value is invalid. + */ +export function parseHeaderValueAsNumber(response, headerName) { + const value = response.headers.get(headerName); + if (!value) + return; + const valueAsNum = Number(value); + if (Number.isNaN(valueAsNum)) + return; + return valueAsNum; +} +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/inspect.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/inspect.js new file mode 100644 index 000000000..c4012807c --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/inspect.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { inspect } from "node:util"; +export const custom = inspect.custom; +//# sourceMappingURL=inspect.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/sanitizer.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/sanitizer.js new file mode 100644 index 000000000..4ea4b25b1 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/sanitizer.js @@ -0,0 +1,139 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { isObject } from "@azure/core-util"; +const RedactedString = "REDACTED"; +// Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts +const defaultAllowedHeaderNames = [ + "x-ms-client-request-id", + "x-ms-return-client-request-id", + "x-ms-useragent", + "x-ms-correlation-request-id", + "x-ms-request-id", + "client-request-id", + "ms-cv", + "return-client-request-id", + "traceparent", + "Access-Control-Allow-Credentials", + "Access-Control-Allow-Headers", + "Access-Control-Allow-Methods", + "Access-Control-Allow-Origin", + "Access-Control-Expose-Headers", + "Access-Control-Max-Age", + "Access-Control-Request-Headers", + "Access-Control-Request-Method", + "Origin", + "Accept", + "Accept-Encoding", + "Cache-Control", + "Connection", + "Content-Length", + "Content-Type", + "Date", + "ETag", + "Expires", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "Last-Modified", + "Pragma", + "Request-Id", + "Retry-After", + "Server", + "Transfer-Encoding", + "User-Agent", + "WWW-Authenticate", +]; +const defaultAllowedQueryParameters = ["api-version"]; +/** + * @internal + */ +export class Sanitizer { + constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { + allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); + allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); + this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); + this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); + } + sanitize(obj) { + const seen = new Set(); + return JSON.stringify(obj, (key, value) => { + // Ensure Errors include their interesting non-enumerable members + if (value instanceof Error) { + return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); + } + if (key === "headers") { + return 
this.sanitizeHeaders(value); + } + else if (key === "url") { + return this.sanitizeUrl(value); + } + else if (key === "query") { + return this.sanitizeQuery(value); + } + else if (key === "body") { + // Don't log the request body + return undefined; + } + else if (key === "response") { + // Don't log response again + return undefined; + } + else if (key === "operationSpec") { + // When using sendOperationRequest, the request carries a massive + // field with the autorest spec. No need to log it. + return undefined; + } + else if (Array.isArray(value) || isObject(value)) { + if (seen.has(value)) { + return "[Circular]"; + } + seen.add(value); + } + return value; + }, 2); + } + sanitizeHeaders(obj) { + const sanitized = {}; + for (const key of Object.keys(obj)) { + if (this.allowedHeaderNames.has(key.toLowerCase())) { + sanitized[key] = obj[key]; + } + else { + sanitized[key] = RedactedString; + } + } + return sanitized; + } + sanitizeQuery(value) { + if (typeof value !== "object" || value === null) { + return value; + } + const sanitized = {}; + for (const k of Object.keys(value)) { + if (this.allowedQueryParameters.has(k.toLowerCase())) { + sanitized[k] = value[k]; + } + else { + sanitized[k] = RedactedString; + } + } + return sanitized; + } + sanitizeUrl(value) { + if (typeof value !== "string" || value === null) { + return value; + } + const url = new URL(value); + if (!url.search) { + return value; + } + for (const [key] of url.searchParams) { + if (!this.allowedQueryParameters.has(key.toLowerCase())) { + url.searchParams.set(key, RedactedString); + } + } + return url.toString(); + } +} +//# sourceMappingURL=sanitizer.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/tokenCycler.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/tokenCycler.js new file mode 100644 index 000000000..514c6cb16 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/tokenCycler.js 
@@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { delay } from "./helpers.js"; +// Default options for the cycler if none are provided +export const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires + retryIntervalInMs: 3000, // Allow refresh attempts every 3s + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. + * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. + * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. + * @returns - A promise that, if it resolves, will resolve with an access token. + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < refreshTimeout) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. 
It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +export function createTokenCycler(credential, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + let tenantId; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(scopes, getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. 
+ refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + tenantId = getTokenOptions.tenantId; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + tenantId = undefined; + throw reason; + }); + } + return refreshWorker; + } + return async (scopes, tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + // If the tenantId passed in token options is different to the one we have + // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to + // refresh the token with the new tenantId or token. 
+ const mustRefresh = tenantId !== tokenOptions.tenantId || Boolean(tokenOptions.claims) || cycler.mustRefresh; + if (mustRefresh) + return refresh(scopes, tokenOptions); + if (cycler.shouldRefresh) { + refresh(scopes, tokenOptions); + } + return token; + }; +} +//# sourceMappingURL=tokenCycler.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/typeGuards.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/typeGuards.js new file mode 100644 index 000000000..2baffad6a --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/typeGuards.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export function isNodeReadableStream(x) { + return Boolean(x && typeof x["pipe"] === "function"); +} +export function isWebReadableStream(x) { + return Boolean(x && + typeof x.getReader === "function" && + typeof x.tee === "function"); +} +export function isReadableStream(x) { + return isNodeReadableStream(x) || isWebReadableStream(x); +} +export function isBlob(x) { + return typeof x.stream === "function"; +} +//# sourceMappingURL=typeGuards.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/userAgent.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/userAgent.js new file mode 100644 index 000000000..afdddc19b --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/userAgent.js @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getHeaderName, setPlatformSpecificData } from "./userAgentPlatform.js"; +import { SDK_VERSION } from "../constants.js"; +function getUserAgentString(telemetryInfo) { + const parts = []; + for (const [key, value] of telemetryInfo) { + const token = value ? 
`${key}/${value}` : key; + parts.push(token); + } + return parts.join(" "); +} +/** + * @internal + */ +export function getUserAgentHeaderName() { + return getHeaderName(); +} +/** + * @internal + */ +export function getUserAgentValue(prefix) { + const runtimeInfo = new Map(); + runtimeInfo.set("core-rest-pipeline", SDK_VERSION); + setPlatformSpecificData(runtimeInfo); + const defaultAgent = getUserAgentString(runtimeInfo); + const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; + return userAgentValue; +} +//# sourceMappingURL=userAgent.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/util/userAgentPlatform.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/userAgentPlatform.js new file mode 100644 index 000000000..41dac2851 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/util/userAgentPlatform.js @@ -0,0 +1,21 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +const { Platform } = await import("react-native"); +/** + * @internal + */ +export function getHeaderName() { + return "x-ms-useragent"; +} +/** + * @internal + */ +export function setPlatformSpecificData(map) { + var _a; + if ((_a = Platform.constants) === null || _a === void 0 ? void 0 : _a.reactNativeVersion) { + const { major, minor, patch } = Platform.constants.reactNativeVersion; + map.set("react-native", `${major}.${minor}.${patch}`); + } + map.set("OS", `${Platform.OS}-${Platform.Version}`); +} +//# sourceMappingURL=userAgentPlatform-react-native.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/dist/react-native/xhrHttpClient.js b/node_modules/@azure/core-rest-pipeline/dist/react-native/xhrHttpClient.js new file mode 100644 index 000000000..925d1d6ba --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/dist/react-native/xhrHttpClient.js @@ -0,0 +1,167 @@ +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; +import { createHttpHeaders } from "./httpHeaders.js"; +import { RestError } from "./restError.js"; +import { isReadableStream } from "./util/typeGuards.js"; +/** + * A HttpClient implementation that uses XMLHttpRequest to send HTTP requests. + * @internal + */ +class XhrHttpClient { + /** + * Makes a request over an underlying transport layer and returns the response. + * @param request - The request to be made. + */ + async sendRequest(request) { + var _a; + const url = new URL(request.url); + const isInsecure = url.protocol !== "https:"; + if (isInsecure && !request.allowInsecureConnection) { + throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); + } + const xhr = new XMLHttpRequest(); + if (request.proxySettings) { + throw new Error("HTTP proxy is not supported in browser environment"); + } + const abortSignal = request.abortSignal; + if (abortSignal) { + if (abortSignal.aborted) { + throw new AbortError("The operation was aborted."); + } + const listener = () => { + xhr.abort(); + }; + abortSignal.addEventListener("abort", listener); + xhr.addEventListener("readystatechange", () => { + if (xhr.readyState === XMLHttpRequest.DONE) { + abortSignal.removeEventListener("abort", listener); + } + }); + } + addProgressListener(xhr.upload, request.onUploadProgress); + addProgressListener(xhr, request.onDownloadProgress); + xhr.open(request.method, request.url); + xhr.timeout = request.timeout; + xhr.withCredentials = request.withCredentials; + for (const [name, value] of request.headers) { + xhr.setRequestHeader(name, value); + } + xhr.responseType = ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.size) ? "blob" : "text"; + const body = typeof request.body === "function" ? 
request.body() : request.body; + if (isReadableStream(body)) { + throw new Error("streams are not supported in XhrHttpClient."); + } + xhr.send(body === undefined ? null : body); + if (xhr.responseType === "blob") { + return new Promise((resolve, reject) => { + handleBlobResponse(xhr, request, resolve, reject); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + else { + return new Promise(function (resolve, reject) { + xhr.addEventListener("load", () => resolve({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: xhr.responseText, + })); + rejectOnTerminalEvent(request, xhr, reject); + }); + } + } +} +function handleBlobResponse(xhr, request, res, rej) { + xhr.addEventListener("readystatechange", () => { + var _a, _b; + // Resolve as soon as headers are loaded + if (xhr.readyState === XMLHttpRequest.HEADERS_RECEIVED) { + if ( + // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code + ((_a = request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(Number.POSITIVE_INFINITY)) || + ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? 
void 0 : _b.has(xhr.status))) { + const blobBody = new Promise((resolve, reject) => { + xhr.addEventListener("load", () => { + resolve(xhr.response); + }); + rejectOnTerminalEvent(request, xhr, reject); + }); + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + blobBody, + }); + } + else { + xhr.addEventListener("load", () => { + // xhr.response is of Blob type if the request is sent with xhr.responseType === "blob" + // but the status code is not one of the stream response status codes, + // so treat it as text and convert from Blob to text + if (xhr.response) { + xhr.response + .text() + .then((text) => { + res({ + request: request, + status: xhr.status, + headers: parseHeaders(xhr), + bodyAsText: text, + }); + return; + }) + .catch((e) => { + rej(e); + }); + } + else { + res({ + request, + status: xhr.status, + headers: parseHeaders(xhr), + }); + } + }); + } + } + }); +} +function addProgressListener(xhr, listener) { + if (listener) { + xhr.addEventListener("progress", (rawEvent) => listener({ + loadedBytes: rawEvent.loaded, + })); + } +} +function parseHeaders(xhr) { + const responseHeaders = createHttpHeaders(); + const headerLines = xhr + .getAllResponseHeaders() + .trim() + .split(/[\r\n]+/); + for (const line of headerLines) { + const index = line.indexOf(":"); + const headerName = line.slice(0, index); + const headerValue = line.slice(index + 2); + responseHeaders.set(headerName, headerValue); + } + return responseHeaders; +} +function rejectOnTerminalEvent(request, xhr, reject) { + xhr.addEventListener("error", () => reject(new RestError(`Failed to send request to ${request.url}`, { + code: RestError.REQUEST_SEND_ERROR, + request, + }))); + const abortError = new AbortError("The operation was aborted."); + xhr.addEventListener("abort", () => reject(abortError)); + xhr.addEventListener("timeout", () => reject(abortError)); +} +/** + * Create a new HttpClient instance for the browser environment. 
+ * @internal + */ +export function createXhrHttpClient() { + return new XhrHttpClient(); +} +//# sourceMappingURL=xhrHttpClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-rest-pipeline/package.json b/node_modules/@azure/core-rest-pipeline/package.json new file mode 100644 index 000000000..ad53392d7 --- /dev/null +++ b/node_modules/@azure/core-rest-pipeline/package.json @@ -0,0 +1,131 @@ +{ + "name": "@azure/core-rest-pipeline", + "version": "1.16.0", + "description": "Isomorphic client library for making HTTP requests in node.js and browser.", + "sdk-type": "client", + "type": "module", + "main": "./dist/commonjs/index.js", + "browser": "./dist/browser/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist/", + "LICENSE", + "README.md" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=18.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-rest-pipeline/", + "sideEffects": false, + "prettier": "@azure/eslint-plugin-azure-sdk/prettier.json", + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && dev-tool run extract-api", + "check-format": "dev-tool run vendored prettier --list-different --config 
../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && dev-tool run extract-api", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"samples-dev/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "//metadata": { + "constantPaths": [ + { + "path": "src/constants.ts", + "prefix": "SDK_VERSION" + } + ], + "sampleConfiguration": { + "skipFolder": true, + "disableDocsMs": true, + "productName": "Azure SDK Core", + "productSlugs": [ + "azure" + ] + }, + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "dependencies": { + "@azure/abort-controller": "^2.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-tracing": "^1.0.1", + 
"@azure/core-util": "^1.9.0", + "@azure/logger": "^1.0.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.0", + "tslib": "^2.6.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@azure-tools/vite-plugin-browser-test-map": "^1.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "rimraf": "^5.0.5", + "tshy": "^1.13.0", + "typescript": "~5.4.5", + "vitest": "^1.3.1" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js b/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js deleted file mode 100644 index b021cccc2..000000000 --- a/node_modules/@azure/core-tracing/dist-esm/src/createSpan.js +++ /dev/null @@ -1,63 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { SpanKind, setSpan, context as otContext, getTracer } from "./interfaces"; -import { trace, INVALID_SPAN_CONTEXT } from "@opentelemetry/api"; -export function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; - } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; - } - return Boolean(azureTracingDisabledValue); -} -/** - * Creates a function that can be used to create spans using the global tracer. 
- * - * Usage: - * - * ```typescript - * // once - * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); - * - * // in each operation - * const span = createSpan("deleteConfigurationSetting", operationOptions); - * // code... - * span.end(); - * ``` - * - * @hidden - * @param args - allows configuration of the prefix for each span as well as the az.namespace field. - */ -export function createSpanFunction(args) { - return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? `${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = trace.wrapSpanContext(INVALID_SPAN_CONTEXT); - } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); - } - if (args.namespace) { - span.setAttribute("az.namespace", args.namespace); - } - let newSpanOptions = tracingOptions.spanOptions || {}; - if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); - } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || otContext.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); - return { - span, - updatedOptions: newOperationOptions - }; - }; -} -//# sourceMappingURL=createSpan.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/index.js 
b/node_modules/@azure/core-tracing/dist-esm/src/index.js deleted file mode 100644 index 065b66c99..000000000 --- a/node_modules/@azure/core-tracing/dist-esm/src/index.js +++ /dev/null @@ -1,9 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -// Tracers and wrappers -export { createSpanFunction } from "./createSpan"; -// Shared interfaces -export { context, getSpan, getSpanContext, getTracer, isSpanContextValid, setSpan, setSpanContext, SpanKind, SpanStatusCode } from "./interfaces"; -// Utilities -export { extractSpanContextFromTraceParentHeader, getTraceParentHeader } from "./utils/traceParentHeader"; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js b/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js deleted file mode 100644 index e251b1ff9..000000000 --- a/node_modules/@azure/core-tracing/dist-esm/src/interfaces.js +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { context as otContext, trace as otTrace } from "@opentelemetry/api"; -/** - * The kind of span. - */ -export var SpanKind; -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. 
- */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(SpanKind || (SpanKind = {})); -/** - * Return the span if one exists - * - * @param context - context to get span from - */ -export function getSpan(context) { - return otTrace.getSpan(context); -} -/** - * Set the span on a context - * - * @param context - context to use as parent - * @param span - span to set active - */ -export function setSpan(context, span) { - return otTrace.setSpan(context, span); -} -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context - context to set active span on - * @param spanContext - span context to be wrapped - */ -export function setSpanContext(context, spanContext) { - return otTrace.setSpanContext(context, spanContext); -} -/** - * Get the span context of the span if it exists. - * - * @param context - context to get values from - */ -export function getSpanContext(context) { - return otTrace.getSpanContext(context); -} -/** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. - * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. - */ -export function isSpanContextValid(context) { - return otTrace.isSpanContextValid(context); -} -export function getTracer(name, version) { - return otTrace.getTracer(name || "azure/core-tracing", version); -} -/** Entrypoint for context API */ -export const context = otContext; -/** SpanStatusCode */ -export var SpanStatusCode; -(function (SpanStatusCode) { - /** - * The default status. 
- */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. - */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(SpanStatusCode || (SpanStatusCode = {})); -//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js b/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js deleted file mode 100644 index 90657bafe..000000000 --- a/node_modules/@azure/core-tracing/dist-esm/src/utils/traceParentHeader.js +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const VERSION = "00"; -/** - * Generates a `SpanContext` given a `traceparent` header value. - * @param traceParent - Serialized span context data as a `traceparent` header value. - * @returns The `SpanContext` generated from the `traceparent` value. - */ -export function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); - if (parts.length !== 4) { - return; - } - const [version, traceId, spanId, traceOptions] = parts; - if (version !== VERSION) { - return; - } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags - }; - return spanContext; -} -/** - * Generates a `traceparent` value given a span context. - * @param spanContext - Contains context for a specific span. - * @returns The `spanContext` represented as a `traceparent` value. 
- */ -export function getTraceParentHeader(spanContext) { - const missingFields = []; - if (!spanContext.traceId) { - missingFields.push("traceId"); - } - if (!spanContext.spanId) { - missingFields.push("spanId"); - } - if (missingFields.length) { - return; - } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; - // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; -} -//# sourceMappingURL=traceParentHeader.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/browser/index.js b/node_modules/@azure/core-tracing/dist/browser/index.js new file mode 100644 index 000000000..39ea0cead --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { useInstrumenter } from "./instrumenter.js"; +export { createTracingClient } from "./tracingClient.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/browser/instrumenter.js b/node_modules/@azure/core-tracing/dist/browser/instrumenter.js new file mode 100644 index 000000000..6dab6efd8 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/instrumenter.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createTracingContext } from "./tracingContext.js"; +import { state } from "./state.js"; +export function createDefaultTracingSpan() { + return { + end: () => { + // noop + }, + isRecording: () => false, + recordException: () => { + // noop + }, + setAttribute: () => { + // noop + }, + setStatus: () => { + // noop + }, + }; +} +export function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return undefined; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: createTracingContext({ parentContext: spanOptions.tracingContext }), + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + }, + }; +} +/** + * Extends the Azure SDK with support for a given instrumenter implementation. + * + * @param instrumenter - The instrumenter implementation to use. + */ +export function useInstrumenter(instrumenter) { + state.instrumenterImplementation = instrumenter; +} +/** + * Gets the currently set instrumenter, a No-Op instrumenter by default. + * + * @returns The currently set instrumenter + */ +export function getInstrumenter() { + if (!state.instrumenterImplementation) { + state.instrumenterImplementation = createDefaultInstrumenter(); + } + return state.instrumenterImplementation; +} +//# sourceMappingURL=instrumenter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/browser/interfaces.js b/node_modules/@azure/core-tracing/dist/browser/interfaces.js new file mode 100644 index 000000000..c0a2e2e65 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/interfaces.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/browser/package.json b/node_modules/@azure/core-tracing/dist/browser/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-tracing/dist/browser/state.js b/node_modules/@azure/core-tracing/dist/browser/state.js new file mode 100644 index 000000000..2b2454046 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/state.js @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Browser-only implementation of the module's state. The browser esm variant will not load the commonjs state, so we do not need to share state between the two. + */ +export const state = { + instrumenterImplementation: undefined, +}; +//# sourceMappingURL=state-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/browser/tracingClient.js b/node_modules/@azure/core-tracing/dist/browser/tracingClient.js new file mode 100644 index 000000000..5f63c5176 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/tracingClient.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getInstrumenter } from "./instrumenter.js"; +import { knownContextKeys } from "./tracingContext.js"; +/** + * Creates a new tracing client. + * + * @param options - Options used to configure the tracing client. + * @returns - An instance of {@link TracingClient}. 
+ */ +export function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + var _a; + const startSpanResult = getInstrumenter().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext })); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(knownContextKeys.namespace, namespace); + } + span.setAttribute("az.namespace", tracingContext.getValue(knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), + }); + return { + span, + updatedOptions, + }; + } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } + catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } + finally { + span.end(); + } + } + function withContext(context, callback, ...callbackArgs) { + return getInstrumenter().withContext(context, callback, ...callbackArgs); + } + /** + * Parses a traceparent header value into a span identifier. + * + * @param traceparentHeader - The traceparent header to parse. + * @returns An implementation-specific identifier for the span. 
+ */ + function parseTraceparentHeader(traceparentHeader) { + return getInstrumenter().parseTraceparentHeader(traceparentHeader); + } + /** + * Creates a set of request headers to propagate tracing information to a backend. + * + * @param tracingContext - The context containing the span to serialize. + * @returns The set of headers to add to a request. + */ + function createRequestHeaders(tracingContext) { + return getInstrumenter().createRequestHeaders(tracingContext); + } + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders, + }; +} +//# sourceMappingURL=tracingClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/browser/tracingContext.js b/node_modules/@azure/core-tracing/dist/browser/tracingContext.js new file mode 100644 index 000000000..efcf60950 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/browser/tracingContext.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** @internal */ +export const knownContextKeys = { + span: Symbol.for("@azure/core-tracing span"), + namespace: Symbol.for("@azure/core-tracing namespace"), +}; +/** + * Creates a new {@link TracingContext} with the given options. + * @param options - A set of known keys that may be set on the context. + * @returns A new {@link TracingContext} with the given options. + * + * @internal + */ +export function createTracingContext(options = {}) { + let context = new TracingContextImpl(options.parentContext); + if (options.span) { + context = context.setValue(knownContextKeys.span, options.span); + } + if (options.namespace) { + context = context.setValue(knownContextKeys.namespace, options.namespace); + } + return context; +} +/** @internal */ +export class TracingContextImpl { + constructor(initialContext) { + this._contextMap = + initialContext instanceof TracingContextImpl + ? 
new Map(initialContext._contextMap) + : new Map(); + } + setValue(key, value) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; + } + getValue(key) { + return this._contextMap.get(key); + } + deleteValue(key) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; + } +} +//# sourceMappingURL=tracingContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/index.js b/node_modules/@azure/core-tracing/dist/commonjs/index.js new file mode 100644 index 000000000..6c62f7fb8 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/index.js @@ -0,0 +1,10 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTracingClient = exports.useInstrumenter = void 0; +var instrumenter_js_1 = require("./instrumenter.js"); +Object.defineProperty(exports, "useInstrumenter", { enumerable: true, get: function () { return instrumenter_js_1.useInstrumenter; } }); +var tracingClient_js_1 = require("./tracingClient.js"); +Object.defineProperty(exports, "createTracingClient", { enumerable: true, get: function () { return tracingClient_js_1.createTracingClient; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js b/node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js new file mode 100644 index 000000000..53c66f926 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/instrumenter.js @@ -0,0 +1,67 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.getInstrumenter = exports.useInstrumenter = exports.createDefaultInstrumenter = exports.createDefaultTracingSpan = void 0; +const tracingContext_js_1 = require("./tracingContext.js"); +const state_js_1 = require("./state.js"); +function createDefaultTracingSpan() { + return { + end: () => { + // noop + }, + isRecording: () => false, + recordException: () => { + // noop + }, + setAttribute: () => { + // noop + }, + setStatus: () => { + // noop + }, + }; +} +exports.createDefaultTracingSpan = createDefaultTracingSpan; +function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return undefined; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }), + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + }, + }; +} +exports.createDefaultInstrumenter = createDefaultInstrumenter; +/** + * Extends the Azure SDK with support for a given instrumenter implementation. + * + * @param instrumenter - The instrumenter implementation to use. + */ +function useInstrumenter(instrumenter) { + state_js_1.state.instrumenterImplementation = instrumenter; +} +exports.useInstrumenter = useInstrumenter; +/** + * Gets the currently set instrumenter, a No-Op instrumenter by default. 
+ * + * @returns The currently set instrumenter + */ +function getInstrumenter() { + if (!state_js_1.state.instrumenterImplementation) { + state_js_1.state.instrumenterImplementation = createDefaultInstrumenter(); + } + return state_js_1.state.instrumenterImplementation; +} +exports.getInstrumenter = getInstrumenter; +//# sourceMappingURL=instrumenter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/interfaces.js b/node_modules/@azure/core-tracing/dist/commonjs/interfaces.js new file mode 100644 index 000000000..16f2397b5 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/interfaces.js @@ -0,0 +1,5 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/package.json b/node_modules/@azure/core-tracing/dist/commonjs/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-tracing/dist/commonjs/state.js b/node_modules/@azure/core-tracing/dist/commonjs/state.js new file mode 100644 index 000000000..e522b21bc --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/state.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.state = void 0; +/** + * @internal + * + * Holds the singleton instrumenter, to be shared across CJS and ESM imports. 
+ */ +exports.state = { + instrumenterImplementation: undefined, +}; +//# sourceMappingURL=state-cjs.cjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js b/node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js new file mode 100644 index 000000000..4086e7b4b --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/tracingClient.js @@ -0,0 +1,78 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createTracingClient = void 0; +const instrumenter_js_1 = require("./instrumenter.js"); +const tracingContext_js_1 = require("./tracingContext.js"); +/** + * Creates a new tracing client. + * + * @param options - Options used to configure the tracing client. + * @returns - An instance of {@link TracingClient}. + */ +function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + var _a; + const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? 
void 0 : _a.tracingContext })); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); + } + span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), + }); + return { + span, + updatedOptions, + }; + } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } + catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } + finally { + span.end(); + } + } + function withContext(context, callback, ...callbackArgs) { + return (0, instrumenter_js_1.getInstrumenter)().withContext(context, callback, ...callbackArgs); + } + /** + * Parses a traceparent header value into a span identifier. + * + * @param traceparentHeader - The traceparent header to parse. + * @returns An implementation-specific identifier for the span. + */ + function parseTraceparentHeader(traceparentHeader) { + return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); + } + /** + * Creates a set of request headers to propagate tracing information to a backend. + * + * @param tracingContext - The context containing the span to serialize. + * @returns The set of headers to add to a request. 
+ */ + function createRequestHeaders(tracingContext) { + return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext); + } + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders, + }; +} +exports.createTracingClient = createTracingClient; +//# sourceMappingURL=tracingClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js b/node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js new file mode 100644 index 000000000..97a6a3f2f --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/tracingContext.js @@ -0,0 +1,52 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TracingContextImpl = exports.createTracingContext = exports.knownContextKeys = void 0; +/** @internal */ +exports.knownContextKeys = { + span: Symbol.for("@azure/core-tracing span"), + namespace: Symbol.for("@azure/core-tracing namespace"), +}; +/** + * Creates a new {@link TracingContext} with the given options. + * @param options - A set of known keys that may be set on the context. + * @returns A new {@link TracingContext} with the given options. + * + * @internal + */ +function createTracingContext(options = {}) { + let context = new TracingContextImpl(options.parentContext); + if (options.span) { + context = context.setValue(exports.knownContextKeys.span, options.span); + } + if (options.namespace) { + context = context.setValue(exports.knownContextKeys.namespace, options.namespace); + } + return context; +} +exports.createTracingContext = createTracingContext; +/** @internal */ +class TracingContextImpl { + constructor(initialContext) { + this._contextMap = + initialContext instanceof TracingContextImpl + ? 
new Map(initialContext._contextMap) + : new Map(); + } + setValue(key, value) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; + } + getValue(key) { + return this._contextMap.get(key); + } + deleteValue(key) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; + } +} +exports.TracingContextImpl = TracingContextImpl; +//# sourceMappingURL=tracingContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-tracing/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 000000000..6305f1798 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. +{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.43.1" + } + ] +} diff --git a/node_modules/@azure/core-tracing/dist/esm/index.js b/node_modules/@azure/core-tracing/dist/esm/index.js new file mode 100644 index 000000000..39ea0cead --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { useInstrumenter } from "./instrumenter.js"; +export { createTracingClient } from "./tracingClient.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/esm/instrumenter.js b/node_modules/@azure/core-tracing/dist/esm/instrumenter.js new file mode 100644 index 000000000..6dab6efd8 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/instrumenter.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createTracingContext } from "./tracingContext.js"; +import { state } from "./state.js"; +export function createDefaultTracingSpan() { + return { + end: () => { + // noop + }, + isRecording: () => false, + recordException: () => { + // noop + }, + setAttribute: () => { + // noop + }, + setStatus: () => { + // noop + }, + }; +} +export function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return undefined; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: createTracingContext({ parentContext: spanOptions.tracingContext }), + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + }, + }; +} +/** + * Extends the Azure SDK with support for a given instrumenter implementation. + * + * @param instrumenter - The instrumenter implementation to use. + */ +export function useInstrumenter(instrumenter) { + state.instrumenterImplementation = instrumenter; +} +/** + * Gets the currently set instrumenter, a No-Op instrumenter by default. + * + * @returns The currently set instrumenter + */ +export function getInstrumenter() { + if (!state.instrumenterImplementation) { + state.instrumenterImplementation = createDefaultInstrumenter(); + } + return state.instrumenterImplementation; +} +//# sourceMappingURL=instrumenter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/esm/interfaces.js b/node_modules/@azure/core-tracing/dist/esm/interfaces.js new file mode 100644 index 000000000..c0a2e2e65 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/interfaces.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/esm/package.json b/node_modules/@azure/core-tracing/dist/esm/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-tracing/dist/esm/state.js b/node_modules/@azure/core-tracing/dist/esm/state.js new file mode 100644 index 000000000..1699f50a9 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/state.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// @ts-expect-error The recommended approach to sharing module state between ESM and CJS. +// See https://github.com/isaacs/tshy/blob/main/README.md#module-local-state for additional information. +import { state as cjsState } from "../commonjs/state.js"; +/** + * Defines the shared state between CJS and ESM by re-exporting the CJS state. + */ +export const state = cjsState; +//# sourceMappingURL=state.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/esm/tracingClient.js b/node_modules/@azure/core-tracing/dist/esm/tracingClient.js new file mode 100644 index 000000000..5f63c5176 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/tracingClient.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getInstrumenter } from "./instrumenter.js"; +import { knownContextKeys } from "./tracingContext.js"; +/** + * Creates a new tracing client. + * + * @param options - Options used to configure the tracing client. + * @returns - An instance of {@link TracingClient}. 
+ */ +export function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + var _a; + const startSpanResult = getInstrumenter().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext })); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(knownContextKeys.namespace, namespace); + } + span.setAttribute("az.namespace", tracingContext.getValue(knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), + }); + return { + span, + updatedOptions, + }; + } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } + catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } + finally { + span.end(); + } + } + function withContext(context, callback, ...callbackArgs) { + return getInstrumenter().withContext(context, callback, ...callbackArgs); + } + /** + * Parses a traceparent header value into a span identifier. + * + * @param traceparentHeader - The traceparent header to parse. + * @returns An implementation-specific identifier for the span. 
+ */ + function parseTraceparentHeader(traceparentHeader) { + return getInstrumenter().parseTraceparentHeader(traceparentHeader); + } + /** + * Creates a set of request headers to propagate tracing information to a backend. + * + * @param tracingContext - The context containing the span to serialize. + * @returns The set of headers to add to a request. + */ + function createRequestHeaders(tracingContext) { + return getInstrumenter().createRequestHeaders(tracingContext); + } + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders, + }; +} +//# sourceMappingURL=tracingClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/esm/tracingContext.js b/node_modules/@azure/core-tracing/dist/esm/tracingContext.js new file mode 100644 index 000000000..efcf60950 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/esm/tracingContext.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** @internal */ +export const knownContextKeys = { + span: Symbol.for("@azure/core-tracing span"), + namespace: Symbol.for("@azure/core-tracing namespace"), +}; +/** + * Creates a new {@link TracingContext} with the given options. + * @param options - A set of known keys that may be set on the context. + * @returns A new {@link TracingContext} with the given options. + * + * @internal + */ +export function createTracingContext(options = {}) { + let context = new TracingContextImpl(options.parentContext); + if (options.span) { + context = context.setValue(knownContextKeys.span, options.span); + } + if (options.namespace) { + context = context.setValue(knownContextKeys.namespace, options.namespace); + } + return context; +} +/** @internal */ +export class TracingContextImpl { + constructor(initialContext) { + this._contextMap = + initialContext instanceof TracingContextImpl + ? 
new Map(initialContext._contextMap) + : new Map(); + } + setValue(key, value) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; + } + getValue(key) { + return this._contextMap.get(key); + } + deleteValue(key) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; + } +} +//# sourceMappingURL=tracingContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/index.js b/node_modules/@azure/core-tracing/dist/index.js deleted file mode 100644 index 527bbb4f1..000000000 --- a/node_modules/@azure/core-tracing/dist/index.js +++ /dev/null @@ -1,219 +0,0 @@ -'use strict'; - -Object.defineProperty(exports, '__esModule', { value: true }); - -var api = require('@opentelemetry/api'); - -// Copyright (c) Microsoft Corporation. -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. 
- */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(exports.SpanKind || (exports.SpanKind = {})); -/** - * Return the span if one exists - * - * @param context - context to get span from - */ -function getSpan(context) { - return api.trace.getSpan(context); -} -/** - * Set the span on a context - * - * @param context - context to use as parent - * @param span - span to set active - */ -function setSpan(context, span) { - return api.trace.setSpan(context, span); -} -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context - context to set active span on - * @param spanContext - span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return api.trace.setSpanContext(context, spanContext); -} -/** - * Get the span context of the span if it exists. - * - * @param context - context to get values from - */ -function getSpanContext(context) { - return api.trace.getSpanContext(context); -} -/** - * Returns true of the given {@link SpanContext} is valid. - * A valid {@link SpanContext} is one which has a valid trace ID and span ID as per the spec. - * - * @param context - the {@link SpanContext} to validate. - * - * @returns true if the {@link SpanContext} is valid, false otherwise. - */ -function isSpanContextValid(context) { - return api.trace.isSpanContextValid(context); -} -function getTracer(name, version) { - return api.trace.getTracer(name || "azure/core-tracing", version); -} -/** Entrypoint for context API */ -const context = api.context; -(function (SpanStatusCode) { - /** - * The default status. - */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. 
- */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(exports.SpanStatusCode || (exports.SpanStatusCode = {})); - -// Copyright (c) Microsoft Corporation. -function isTracingDisabled() { - var _a; - if (typeof process === "undefined") { - // not supported in browser for now without polyfills - return false; - } - const azureTracingDisabledValue = (_a = process.env.AZURE_TRACING_DISABLED) === null || _a === void 0 ? void 0 : _a.toLowerCase(); - if (azureTracingDisabledValue === "false" || azureTracingDisabledValue === "0") { - return false; - } - return Boolean(azureTracingDisabledValue); -} -/** - * Creates a function that can be used to create spans using the global tracer. - * - * Usage: - * - * ```typescript - * // once - * const createSpan = createSpanFunction({ packagePrefix: "Azure.Data.AppConfiguration", namespace: "Microsoft.AppConfiguration" }); - * - * // in each operation - * const span = createSpan("deleteConfigurationSetting", operationOptions); - * // code... - * span.end(); - * ``` - * - * @hidden - * @param args - allows configuration of the prefix for each span as well as the az.namespace field. - */ -function createSpanFunction(args) { - return function (operationName, operationOptions) { - const tracer = getTracer(); - const tracingOptions = (operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) || {}; - const spanOptions = Object.assign({ kind: exports.SpanKind.INTERNAL }, tracingOptions.spanOptions); - const spanName = args.packagePrefix ? 
`${args.packagePrefix}.${operationName}` : operationName; - let span; - if (isTracingDisabled()) { - span = api.trace.wrapSpanContext(api.INVALID_SPAN_CONTEXT); - } - else { - span = tracer.startSpan(spanName, spanOptions, tracingOptions.tracingContext); - } - if (args.namespace) { - span.setAttribute("az.namespace", args.namespace); - } - let newSpanOptions = tracingOptions.spanOptions || {}; - if (span.isRecording() && args.namespace) { - newSpanOptions = Object.assign(Object.assign({}, tracingOptions.spanOptions), { attributes: Object.assign(Object.assign({}, spanOptions.attributes), { "az.namespace": args.namespace }) }); - } - const newTracingOptions = Object.assign(Object.assign({}, tracingOptions), { spanOptions: newSpanOptions, tracingContext: setSpan(tracingOptions.tracingContext || context.active(), span) }); - const newOperationOptions = Object.assign(Object.assign({}, operationOptions), { tracingOptions: newTracingOptions }); - return { - span, - updatedOptions: newOperationOptions - }; - }; -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -const VERSION = "00"; -/** - * Generates a `SpanContext` given a `traceparent` header value. - * @param traceParent - Serialized span context data as a `traceparent` header value. - * @returns The `SpanContext` generated from the `traceparent` value. - */ -function extractSpanContextFromTraceParentHeader(traceParentHeader) { - const parts = traceParentHeader.split("-"); - if (parts.length !== 4) { - return; - } - const [version, traceId, spanId, traceOptions] = parts; - if (version !== VERSION) { - return; - } - const traceFlags = parseInt(traceOptions, 16); - const spanContext = { - spanId, - traceId, - traceFlags - }; - return spanContext; -} -/** - * Generates a `traceparent` value given a span context. - * @param spanContext - Contains context for a specific span. - * @returns The `spanContext` represented as a `traceparent` value. 
- */ -function getTraceParentHeader(spanContext) { - const missingFields = []; - if (!spanContext.traceId) { - missingFields.push("traceId"); - } - if (!spanContext.spanId) { - missingFields.push("spanId"); - } - if (missingFields.length) { - return; - } - const flags = spanContext.traceFlags || 0 /* NONE */; - const hexFlags = flags.toString(16); - const traceFlags = hexFlags.length === 1 ? `0${hexFlags}` : hexFlags; - // https://www.w3.org/TR/trace-context/#traceparent-header-field-values - return `${VERSION}-${spanContext.traceId}-${spanContext.spanId}-${traceFlags}`; -} - -exports.context = context; -exports.createSpanFunction = createSpanFunction; -exports.extractSpanContextFromTraceParentHeader = extractSpanContextFromTraceParentHeader; -exports.getSpan = getSpan; -exports.getSpanContext = getSpanContext; -exports.getTraceParentHeader = getTraceParentHeader; -exports.getTracer = getTracer; -exports.isSpanContextValid = isSpanContextValid; -exports.setSpan = setSpan; -exports.setSpanContext = setSpanContext; -//# sourceMappingURL=index.js.map diff --git a/node_modules/@azure/core-tracing/dist/react-native/index.js b/node_modules/@azure/core-tracing/dist/react-native/index.js new file mode 100644 index 000000000..39ea0cead --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { useInstrumenter } from "./instrumenter.js"; +export { createTracingClient } from "./tracingClient.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/react-native/instrumenter.js b/node_modules/@azure/core-tracing/dist/react-native/instrumenter.js new file mode 100644 index 000000000..6dab6efd8 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/instrumenter.js @@ -0,0 +1,60 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { createTracingContext } from "./tracingContext.js"; +import { state } from "./state.js"; +export function createDefaultTracingSpan() { + return { + end: () => { + // noop + }, + isRecording: () => false, + recordException: () => { + // noop + }, + setAttribute: () => { + // noop + }, + setStatus: () => { + // noop + }, + }; +} +export function createDefaultInstrumenter() { + return { + createRequestHeaders: () => { + return {}; + }, + parseTraceparentHeader: () => { + return undefined; + }, + startSpan: (_name, spanOptions) => { + return { + span: createDefaultTracingSpan(), + tracingContext: createTracingContext({ parentContext: spanOptions.tracingContext }), + }; + }, + withContext(_context, callback, ...callbackArgs) { + return callback(...callbackArgs); + }, + }; +} +/** + * Extends the Azure SDK with support for a given instrumenter implementation. + * + * @param instrumenter - The instrumenter implementation to use. + */ +export function useInstrumenter(instrumenter) { + state.instrumenterImplementation = instrumenter; +} +/** + * Gets the currently set instrumenter, a No-Op instrumenter by default. + * + * @returns The currently set instrumenter + */ +export function getInstrumenter() { + if (!state.instrumenterImplementation) { + state.instrumenterImplementation = createDefaultInstrumenter(); + } + return state.instrumenterImplementation; +} +//# sourceMappingURL=instrumenter.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/react-native/interfaces.js b/node_modules/@azure/core-tracing/dist/react-native/interfaces.js new file mode 100644 index 000000000..c0a2e2e65 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/interfaces.js @@ -0,0 +1,4 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export {}; +//# sourceMappingURL=interfaces.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/react-native/package.json b/node_modules/@azure/core-tracing/dist/react-native/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-tracing/dist/react-native/state.js b/node_modules/@azure/core-tracing/dist/react-native/state.js new file mode 100644 index 000000000..1699f50a9 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/state.js @@ -0,0 +1,10 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// @ts-expect-error The recommended approach to sharing module state between ESM and CJS. +// See https://github.com/isaacs/tshy/blob/main/README.md#module-local-state for additional information. +import { state as cjsState } from "../commonjs/state.js"; +/** + * Defines the shared state between CJS and ESM by re-exporting the CJS state. + */ +export const state = cjsState; +//# sourceMappingURL=state.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/react-native/tracingClient.js b/node_modules/@azure/core-tracing/dist/react-native/tracingClient.js new file mode 100644 index 000000000..5f63c5176 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/tracingClient.js @@ -0,0 +1,74 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { getInstrumenter } from "./instrumenter.js"; +import { knownContextKeys } from "./tracingContext.js"; +/** + * Creates a new tracing client. + * + * @param options - Options used to configure the tracing client. + * @returns - An instance of {@link TracingClient}. 
+ */ +export function createTracingClient(options) { + const { namespace, packageName, packageVersion } = options; + function startSpan(name, operationOptions, spanOptions) { + var _a; + const startSpanResult = getInstrumenter().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext })); + let tracingContext = startSpanResult.tracingContext; + const span = startSpanResult.span; + if (!tracingContext.getValue(knownContextKeys.namespace)) { + tracingContext = tracingContext.setValue(knownContextKeys.namespace, namespace); + } + span.setAttribute("az.namespace", tracingContext.getValue(knownContextKeys.namespace)); + const updatedOptions = Object.assign({}, operationOptions, { + tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), + }); + return { + span, + updatedOptions, + }; + } + async function withSpan(name, operationOptions, callback, spanOptions) { + const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); + try { + const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); + span.setStatus({ status: "success" }); + return result; + } + catch (err) { + span.setStatus({ status: "error", error: err }); + throw err; + } + finally { + span.end(); + } + } + function withContext(context, callback, ...callbackArgs) { + return getInstrumenter().withContext(context, callback, ...callbackArgs); + } + /** + * Parses a traceparent header value into a span identifier. + * + * @param traceparentHeader - The traceparent header to parse. + * @returns An implementation-specific identifier for the span. 
+ */ + function parseTraceparentHeader(traceparentHeader) { + return getInstrumenter().parseTraceparentHeader(traceparentHeader); + } + /** + * Creates a set of request headers to propagate tracing information to a backend. + * + * @param tracingContext - The context containing the span to serialize. + * @returns The set of headers to add to a request. + */ + function createRequestHeaders(tracingContext) { + return getInstrumenter().createRequestHeaders(tracingContext); + } + return { + startSpan, + withSpan, + withContext, + parseTraceparentHeader, + createRequestHeaders, + }; +} +//# sourceMappingURL=tracingClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/dist/react-native/tracingContext.js b/node_modules/@azure/core-tracing/dist/react-native/tracingContext.js new file mode 100644 index 000000000..efcf60950 --- /dev/null +++ b/node_modules/@azure/core-tracing/dist/react-native/tracingContext.js @@ -0,0 +1,47 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** @internal */ +export const knownContextKeys = { + span: Symbol.for("@azure/core-tracing span"), + namespace: Symbol.for("@azure/core-tracing namespace"), +}; +/** + * Creates a new {@link TracingContext} with the given options. + * @param options - A set of known keys that may be set on the context. + * @returns A new {@link TracingContext} with the given options. + * + * @internal + */ +export function createTracingContext(options = {}) { + let context = new TracingContextImpl(options.parentContext); + if (options.span) { + context = context.setValue(knownContextKeys.span, options.span); + } + if (options.namespace) { + context = context.setValue(knownContextKeys.namespace, options.namespace); + } + return context; +} +/** @internal */ +export class TracingContextImpl { + constructor(initialContext) { + this._contextMap = + initialContext instanceof TracingContextImpl + ? 
new Map(initialContext._contextMap) + : new Map(); + } + setValue(key, value) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.set(key, value); + return newContext; + } + getValue(key) { + return this._contextMap.get(key); + } + deleteValue(key) { + const newContext = new TracingContextImpl(this); + newContext._contextMap.delete(key); + return newContext; + } +} +//# sourceMappingURL=tracingContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-tracing/package.json b/node_modules/@azure/core-tracing/package.json index 1cd7235e6..12c27ff71 100644 --- a/node_modules/@azure/core-tracing/package.json +++ b/node_modules/@azure/core-tracing/package.json @@ -1,43 +1,35 @@ { "name": "@azure/core-tracing", - "version": "1.0.0-preview.13", + "version": "1.1.2", "description": "Provides low-level interfaces and helper methods for tracing in Azure SDK", "sdk-type": "client", - "main": "dist/index.js", - "module": "dist-esm/src/index.js", - "browser": { - "./dist-esm/src/utils/global.js": "./dist-esm/src/utils/global.browser.js" - }, - "types": "types/core-tracing.d.ts", - "scripts": { - "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", - "build:samples": "echo Obsolete", - "build:test": "tsc -p . && rollup -c 2>&1", - "build": "tsc -p . && rollup -c 2>&1 && api-extractor run --local", - "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"*.{js,json}\"", - "clean": "rimraf dist dist-* temp types *.tgz *.log", - "docs": "typedoc --excludePrivate --excludeNotExported --excludeExternals --stripInternal --mode file --out ./dist/docs ./src", - "execute:samples": "echo skipped", - "extract-api": "tsc -p . 
&& api-extractor run --local", - "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", - "integration-test:browser": "echo skipped", - "integration-test:node": "echo skipped", - "integration-test": "npm run integration-test:node && npm run integration-test:browser", - "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix --fix-type [problem,suggestion]", - "lint": "eslint package.json api-extractor.json src test --ext .ts", - "pack": "npm pack 2>&1", - "prebuild": "npm run clean", - "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", - "test:node": "npm run clean && tsc -p . && npm run unit-test:node && npm run integration-test:node", - "test": "npm run clean && tsc -p . && npm run unit-test:node && rollup -c 2>&1 && npm run unit-test:browser && npm run integration-test", - "unit-test:browser": "karma start --single-run", - "unit-test:node": "mocha -r esm -r ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace --exclude \"test/**/browser/*.spec.ts\" \"test/**/*.spec.ts\"", - "unit-test": "npm run unit-test:node && npm run unit-test:browser" + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } }, "files": [ "dist/", - "dist-esm/src/", - 
"types/core-tracing.d.ts", "README.md", "LICENSE" ], @@ -53,47 +45,74 @@ "url": "https://github.com/Azure/azure-sdk-for-js/issues" }, "engines": { - "node": ">=12.0.0" + "node": ">=18.0.0" }, "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-tracing/README.md", "sideEffects": false, + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "echo skipped", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"samples-dev/**/*.{ts,cts,mts}\" \"*.{js,cjs,mjs,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + 
"unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, "dependencies": { - "@opentelemetry/api": "^1.0.1", - "tslib": "^2.2.0" + "tslib": "^2.6.2" }, "devDependencies": { "@azure/core-auth": "^1.3.0", "@azure/dev-tool": "^1.0.0", "@azure/eslint-plugin-azure-sdk": "^3.0.0", - "@microsoft/api-extractor": "7.7.11", - "@opentelemetry/tracing": "^0.22.0", - "@types/chai": "^4.1.6", - "@types/mocha": "^7.0.2", - "@types/node": "^12.0.0", - "chai": "^4.2.0", - "cross-env": "^7.0.2", - "eslint": "^7.15.0", - "inherits": "^2.0.3", - "karma": "^6.2.0", - "karma-chrome-launcher": "^3.0.0", - "karma-coverage": "^2.0.0", - "karma-edge-launcher": "^0.4.2", - "karma-env-preprocessor": "^0.1.1", - "karma-firefox-launcher": "^1.1.0", - "karma-ie-launcher": "^1.0.0", - "karma-junit-reporter": "^2.0.1", - "karma-mocha": "^2.0.1", - "karma-mocha-reporter": "^2.2.5", - "karma-sourcemap-loader": "^0.3.8", - "mocha": "^7.1.1", - "mocha-junit-reporter": "^1.18.0", - "prettier": "^1.16.4", - "rimraf": "^3.0.0", - "rollup": "^1.16.3", - "typescript": "~4.2.0", - "util": "^0.12.1", - "typedoc": "0.15.2", - "sinon": "^9.0.2", - "@types/sinon": "^9.0.4" + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.0.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.13.0", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "sampleConfiguration": { + "disableDocsMs": true, + "productName": "Azure SDK Core", + "productSlugs": [ + "azure" + ] + }, + "migrationDate": "2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false } } diff --git 
a/node_modules/@azure/core-util/dist/browser/aborterUtils.js b/node_modules/@azure/core-util/dist/browser/aborterUtils.js index c34a95d10..3da2b2346 100644 --- a/node_modules/@azure/core-util/dist/browser/aborterUtils.js +++ b/node_modules/@azure/core-util/dist/browser/aborterUtils.js @@ -4,17 +4,18 @@ * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. */ export async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; const aborter = new AbortController(); function abortHandler() { aborter.abort(); } - options?.abortSignal?.addEventListener("abort", abortHandler); + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); try { return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } finally { aborter.abort(); - options?.abortSignal?.removeEventListener("abort", abortHandler); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); } } //# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/checkEnvironment.js b/node_modules/@azure/core-util/dist/browser/checkEnvironment.js index 53ba62e52..b7feb2085 100644 --- a/node_modules/@azure/core-util/dist/browser/checkEnvironment.js +++ b/node_modules/@azure/core-util/dist/browser/checkEnvironment.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +var _a, _b, _c, _d; /** * A constant that indicates whether the environment the code is running is a Web Browser. */ @@ -9,10 +10,10 @@ export const isBrowser = typeof window !== "undefined" && typeof window.document * A constant that indicates whether the environment the code is running is a Web Worker. 
*/ export const isWebWorker = typeof self === "object" && - typeof self?.importScripts === "function" && - (self.constructor?.name === "DedicatedWorkerGlobalScope" || - self.constructor?.name === "ServiceWorkerGlobalScope" || - self.constructor?.name === "SharedWorkerGlobalScope"); + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); /** * A constant that indicates whether the environment the code is running is Deno. */ @@ -24,17 +25,23 @@ export const isDeno = typeof Deno !== "undefined" && */ export const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** - * A constant that indicates whether the environment the code is running is Node.JS. + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. */ -export const isNode = typeof globalThis.process !== "undefined" && +export const isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && - Boolean(globalThis.process.versions?.node) && - // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions - !isDeno && - !isBun; + Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + * @deprecated Use `isNodeLike` instead. + */ +export const isNode = isNodeLike; +/** + * A constant that indicates whether the environment the code is running is Node.JS. 
+ */ +export const isNodeRuntime = isNodeLike && !isBun && !isDeno; /** * A constant that indicates whether the environment the code is running is in React-Native. */ // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js -export const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +export const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; //# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js index fc54e7c74..28c0d5aaa 100644 --- a/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js +++ b/node_modules/@azure/core-util/dist/browser/createAbortablePromise.js @@ -8,20 +8,20 @@ import { AbortError } from "@azure/abort-controller"; * @returns A promise that can be aborted. */ export function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return new Promise((resolve, reject) => { function rejectOnAbort() { - reject(new AbortError(abortErrorMsg ?? "The operation was aborted.")); + reject(new AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } function removeListeners() { - abortSignal?.removeEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); } function onAbort() { - cleanupBeforeAbort?.(); + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? 
void 0 : cleanupBeforeAbort(); removeListeners(); rejectOnAbort(); } - if (abortSignal?.aborted) { + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { return rejectOnAbort(); } try { @@ -36,7 +36,7 @@ export function createAbortablePromise(buildPromise, options) { catch (err) { reject(err); } - abortSignal?.addEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); }); } //# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/delay.js b/node_modules/@azure/core-util/dist/browser/delay.js index 3e12ea5ed..b7480238c 100644 --- a/node_modules/@azure/core-util/dist/browser/delay.js +++ b/node_modules/@azure/core-util/dist/browser/delay.js @@ -10,13 +10,13 @@ const StandardAbortMessage = "The delay was aborted."; */ export function delay(timeInMs, options) { let token; - const { abortSignal, abortErrorMsg } = options ?? {}; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return createAbortablePromise((resolve) => { token = setTimeout(resolve, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, - abortErrorMsg: abortErrorMsg ?? StandardAbortMessage, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? 
abortErrorMsg : StandardAbortMessage, }); } //# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/index.js b/node_modules/@azure/core-util/dist/browser/index.js index 64b7b80ac..19e3b5b23 100644 --- a/node_modules/@azure/core-util/dist/browser/index.js +++ b/node_modules/@azure/core-util/dist/browser/index.js @@ -9,6 +9,6 @@ export { isError, getErrorMessage } from "./error.js"; export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; export { randomUUID } from "./uuidUtils.js"; -export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { isBrowser, isBun, isNode, isNodeLike, isNodeRuntime, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; export { uint8ArrayToString, stringToUint8Array } from "./bytesEncoding.js"; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/browser/uuidUtils.js b/node_modules/@azure/core-util/dist/browser/uuidUtils.js index d062906ce..e733ddc5a 100644 --- a/node_modules/@azure/core-util/dist/browser/uuidUtils.js +++ b/node_modules/@azure/core-util/dist/browser/uuidUtils.js @@ -1,8 +1,9 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +var _a; import { generateUUID } from "./uuidUtils.common.js"; // NOTE: This could be undefined if not used in a secure context -const uuidFunction = typeof globalThis?.crypto?.randomUUID === "function" +const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" ? 
globalThis.crypto.randomUUID.bind(globalThis.crypto) : generateUUID; /** diff --git a/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js index d4b87c40a..93a6f80c1 100644 --- a/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js +++ b/node_modules/@azure/core-util/dist/commonjs/aborterUtils.js @@ -7,17 +7,18 @@ exports.cancelablePromiseRace = void 0; * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. */ async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; const aborter = new AbortController(); function abortHandler() { aborter.abort(); } - options?.abortSignal?.addEventListener("abort", abortHandler); + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); try { return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } finally { aborter.abort(); - options?.abortSignal?.removeEventListener("abort", abortHandler); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); } } exports.cancelablePromiseRace = cancelablePromiseRace; diff --git a/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js index 7cfd4f551..3822c2c2c 100644 --- a/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js +++ b/node_modules/@azure/core-util/dist/commonjs/checkEnvironment.js @@ -1,8 +1,9 @@ "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
+var _a, _b, _c, _d; Object.defineProperty(exports, "__esModule", { value: true }); -exports.isReactNative = exports.isNode = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; +exports.isReactNative = exports.isNodeRuntime = exports.isNode = exports.isNodeLike = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; /** * A constant that indicates whether the environment the code is running is a Web Browser. */ @@ -12,10 +13,10 @@ exports.isBrowser = typeof window !== "undefined" && typeof window.document !== * A constant that indicates whether the environment the code is running is a Web Worker. */ exports.isWebWorker = typeof self === "object" && - typeof self?.importScripts === "function" && - (self.constructor?.name === "DedicatedWorkerGlobalScope" || - self.constructor?.name === "ServiceWorkerGlobalScope" || - self.constructor?.name === "SharedWorkerGlobalScope"); + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); /** * A constant that indicates whether the environment the code is running is Deno. */ @@ -27,17 +28,23 @@ exports.isDeno = typeof Deno !== "undefined" && */ exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** - * A constant that indicates whether the environment the code is running is Node.JS. + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. 
*/ -exports.isNode = typeof globalThis.process !== "undefined" && +exports.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && - Boolean(globalThis.process.versions?.node) && - // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions - !exports.isDeno && - !exports.isBun; + Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + * @deprecated Use `isNodeLike` instead. + */ +exports.isNode = exports.isNodeLike; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +exports.isNodeRuntime = exports.isNodeLike && !exports.isBun && !exports.isDeno; /** * A constant that indicates whether the environment the code is running is in React-Native. */ // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js -exports.isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +exports.isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; //# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js index a3fdf1d49..20f176e29 100644 --- a/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js +++ b/node_modules/@azure/core-util/dist/commonjs/createAbortablePromise.js @@ -11,20 +11,20 @@ const abort_controller_1 = require("@azure/abort-controller"); * @returns A promise that can be aborted. */ function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? 
{}; + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return new Promise((resolve, reject) => { function rejectOnAbort() { - reject(new abort_controller_1.AbortError(abortErrorMsg ?? "The operation was aborted.")); + reject(new abort_controller_1.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } function removeListeners() { - abortSignal?.removeEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); } function onAbort() { - cleanupBeforeAbort?.(); + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); removeListeners(); rejectOnAbort(); } - if (abortSignal?.aborted) { + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { return rejectOnAbort(); } try { @@ -39,7 +39,7 @@ function createAbortablePromise(buildPromise, options) { catch (err) { reject(err); } - abortSignal?.addEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); }); } exports.createAbortablePromise = createAbortablePromise; diff --git a/node_modules/@azure/core-util/dist/commonjs/delay.js b/node_modules/@azure/core-util/dist/commonjs/delay.js index 0183ef621..15b3304fb 100644 --- a/node_modules/@azure/core-util/dist/commonjs/delay.js +++ b/node_modules/@azure/core-util/dist/commonjs/delay.js @@ -13,13 +13,13 @@ const StandardAbortMessage = "The delay was aborted."; */ function delay(timeInMs, options) { let token; - const { abortSignal, abortErrorMsg } = options ?? {}; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? 
options : {}; return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve) => { token = setTimeout(resolve, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, - abortErrorMsg: abortErrorMsg ?? StandardAbortMessage, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, }); } exports.delay = delay; diff --git a/node_modules/@azure/core-util/dist/commonjs/index.js b/node_modules/@azure/core-util/dist/commonjs/index.js index 11e2b23bc..078c148d4 100644 --- a/node_modules/@azure/core-util/dist/commonjs/index.js +++ b/node_modules/@azure/core-util/dist/commonjs/index.js @@ -2,7 +2,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", { value: true }); -exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.delay = void 0; +exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNodeRuntime = exports.isNodeLike = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.delay = void 0; var delay_js_1 = require("./delay.js"); Object.defineProperty(exports, "delay", { enumerable: 
true, get: function () { return delay_js_1.delay; } }); var aborterUtils_js_1 = require("./aborterUtils.js"); @@ -29,6 +29,8 @@ var checkEnvironment_js_1 = require("./checkEnvironment.js"); Object.defineProperty(exports, "isBrowser", { enumerable: true, get: function () { return checkEnvironment_js_1.isBrowser; } }); Object.defineProperty(exports, "isBun", { enumerable: true, get: function () { return checkEnvironment_js_1.isBun; } }); Object.defineProperty(exports, "isNode", { enumerable: true, get: function () { return checkEnvironment_js_1.isNode; } }); +Object.defineProperty(exports, "isNodeLike", { enumerable: true, get: function () { return checkEnvironment_js_1.isNodeLike; } }); +Object.defineProperty(exports, "isNodeRuntime", { enumerable: true, get: function () { return checkEnvironment_js_1.isNodeRuntime; } }); Object.defineProperty(exports, "isDeno", { enumerable: true, get: function () { return checkEnvironment_js_1.isDeno; } }); Object.defineProperty(exports, "isReactNative", { enumerable: true, get: function () { return checkEnvironment_js_1.isReactNative; } }); Object.defineProperty(exports, "isWebWorker", { enumerable: true, get: function () { return checkEnvironment_js_1.isWebWorker; } }); diff --git a/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json index 22735db17..6305f1798 100644 --- a/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json +++ b/node_modules/@azure/core-util/dist/commonjs/tsdoc-metadata.json @@ -5,7 +5,7 @@ "toolPackages": [ { "packageName": "@microsoft/api-extractor", - "packageVersion": "7.42.3" + "packageVersion": "7.43.1" } ] } diff --git a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js index ae5f10689..a244f1209 100644 --- a/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js +++ b/node_modules/@azure/core-util/dist/commonjs/uuidUtils.js @@ -1,11 +1,12 @@ "use 
strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +var _a; Object.defineProperty(exports, "__esModule", { value: true }); exports.randomUUID = void 0; const crypto_1 = require("crypto"); // NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. -const uuidFunction = typeof globalThis?.crypto?.randomUUID === "function" +const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" ? globalThis.crypto.randomUUID.bind(globalThis.crypto) : crypto_1.randomUUID; /** diff --git a/node_modules/@azure/core-util/dist/esm/aborterUtils.js b/node_modules/@azure/core-util/dist/esm/aborterUtils.js index c34a95d10..3da2b2346 100644 --- a/node_modules/@azure/core-util/dist/esm/aborterUtils.js +++ b/node_modules/@azure/core-util/dist/esm/aborterUtils.js @@ -4,17 +4,18 @@ * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. */ export async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; const aborter = new AbortController(); function abortHandler() { aborter.abort(); } - options?.abortSignal?.addEventListener("abort", abortHandler); + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); try { return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } finally { aborter.abort(); - options?.abortSignal?.removeEventListener("abort", abortHandler); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? 
void 0 : _b.removeEventListener("abort", abortHandler); } } //# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/checkEnvironment.js b/node_modules/@azure/core-util/dist/esm/checkEnvironment.js index 53ba62e52..b7feb2085 100644 --- a/node_modules/@azure/core-util/dist/esm/checkEnvironment.js +++ b/node_modules/@azure/core-util/dist/esm/checkEnvironment.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +var _a, _b, _c, _d; /** * A constant that indicates whether the environment the code is running is a Web Browser. */ @@ -9,10 +10,10 @@ export const isBrowser = typeof window !== "undefined" && typeof window.document * A constant that indicates whether the environment the code is running is a Web Worker. */ export const isWebWorker = typeof self === "object" && - typeof self?.importScripts === "function" && - (self.constructor?.name === "DedicatedWorkerGlobalScope" || - self.constructor?.name === "ServiceWorkerGlobalScope" || - self.constructor?.name === "SharedWorkerGlobalScope"); + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); /** * A constant that indicates whether the environment the code is running is Deno. */ @@ -24,17 +25,23 @@ export const isDeno = typeof Deno !== "undefined" && */ export const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** - * A constant that indicates whether the environment the code is running is Node.JS. + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. 
*/ -export const isNode = typeof globalThis.process !== "undefined" && +export const isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && - Boolean(globalThis.process.versions?.node) && - // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions - !isDeno && - !isBun; + Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + * @deprecated Use `isNodeLike` instead. + */ +export const isNode = isNodeLike; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export const isNodeRuntime = isNodeLike && !isBun && !isDeno; /** * A constant that indicates whether the environment the code is running is in React-Native. */ // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js -export const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +export const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; //# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js index fc54e7c74..28c0d5aaa 100644 --- a/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js +++ b/node_modules/@azure/core-util/dist/esm/createAbortablePromise.js @@ -8,20 +8,20 @@ import { AbortError } from "@azure/abort-controller"; * @returns A promise that can be aborted. */ export function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? 
{}; + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return new Promise((resolve, reject) => { function rejectOnAbort() { - reject(new AbortError(abortErrorMsg ?? "The operation was aborted.")); + reject(new AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } function removeListeners() { - abortSignal?.removeEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); } function onAbort() { - cleanupBeforeAbort?.(); + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); removeListeners(); rejectOnAbort(); } - if (abortSignal?.aborted) { + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { return rejectOnAbort(); } try { @@ -36,7 +36,7 @@ export function createAbortablePromise(buildPromise, options) { catch (err) { reject(err); } - abortSignal?.addEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); }); } //# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/delay.js b/node_modules/@azure/core-util/dist/esm/delay.js index 3e12ea5ed..b7480238c 100644 --- a/node_modules/@azure/core-util/dist/esm/delay.js +++ b/node_modules/@azure/core-util/dist/esm/delay.js @@ -10,13 +10,13 @@ const StandardAbortMessage = "The delay was aborted."; */ export function delay(timeInMs, options) { let token; - const { abortSignal, abortErrorMsg } = options ?? {}; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return createAbortablePromise((resolve) => { token = setTimeout(resolve, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, - abortErrorMsg: abortErrorMsg ?? 
StandardAbortMessage, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, }); } //# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/index.js b/node_modules/@azure/core-util/dist/esm/index.js index 64b7b80ac..19e3b5b23 100644 --- a/node_modules/@azure/core-util/dist/esm/index.js +++ b/node_modules/@azure/core-util/dist/esm/index.js @@ -9,6 +9,6 @@ export { isError, getErrorMessage } from "./error.js"; export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; export { randomUUID } from "./uuidUtils.js"; -export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { isBrowser, isBun, isNode, isNodeLike, isNodeRuntime, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; export { uint8ArrayToString, stringToUint8Array } from "./bytesEncoding.js"; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/esm/uuidUtils.js b/node_modules/@azure/core-util/dist/esm/uuidUtils.js index f20c10c60..48c0b11cf 100644 --- a/node_modules/@azure/core-util/dist/esm/uuidUtils.js +++ b/node_modules/@azure/core-util/dist/esm/uuidUtils.js @@ -1,8 +1,9 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +var _a; import { randomUUID as v4RandomUUID } from "crypto"; // NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. -const uuidFunction = typeof globalThis?.crypto?.randomUUID === "function" +const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" ? 
globalThis.crypto.randomUUID.bind(globalThis.crypto) : v4RandomUUID; /** diff --git a/node_modules/@azure/core-util/dist/react-native/aborterUtils.js b/node_modules/@azure/core-util/dist/react-native/aborterUtils.js index c34a95d10..3da2b2346 100644 --- a/node_modules/@azure/core-util/dist/react-native/aborterUtils.js +++ b/node_modules/@azure/core-util/dist/react-native/aborterUtils.js @@ -4,17 +4,18 @@ * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. */ export async function cancelablePromiseRace(abortablePromiseBuilders, options) { + var _a, _b; const aborter = new AbortController(); function abortHandler() { aborter.abort(); } - options?.abortSignal?.addEventListener("abort", abortHandler); + (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); try { return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } finally { aborter.abort(); - options?.abortSignal?.removeEventListener("abort", abortHandler); + (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); } } //# sourceMappingURL=aborterUtils.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js index 53ba62e52..b7feb2085 100644 --- a/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js +++ b/node_modules/@azure/core-util/dist/react-native/checkEnvironment.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +var _a, _b, _c, _d; /** * A constant that indicates whether the environment the code is running is a Web Browser. 
*/ @@ -9,10 +10,10 @@ export const isBrowser = typeof window !== "undefined" && typeof window.document * A constant that indicates whether the environment the code is running is a Web Worker. */ export const isWebWorker = typeof self === "object" && - typeof self?.importScripts === "function" && - (self.constructor?.name === "DedicatedWorkerGlobalScope" || - self.constructor?.name === "ServiceWorkerGlobalScope" || - self.constructor?.name === "SharedWorkerGlobalScope"); + typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && + (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || + ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || + ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); /** * A constant that indicates whether the environment the code is running is Deno. */ @@ -24,17 +25,23 @@ export const isDeno = typeof Deno !== "undefined" && */ export const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** - * A constant that indicates whether the environment the code is running is Node.JS. + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. */ -export const isNode = typeof globalThis.process !== "undefined" && +export const isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && - Boolean(globalThis.process.versions?.node) && - // Deno thought it was a good idea to spoof process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions - !isDeno && - !isBun; + Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); +/** + * A constant that indicates whether the environment the code is running is a Node.js compatible environment. + * @deprecated Use `isNodeLike` instead. 
+ */ +export const isNode = isNodeLike; +/** + * A constant that indicates whether the environment the code is running is Node.JS. + */ +export const isNodeRuntime = isNodeLike && !isBun && !isDeno; /** * A constant that indicates whether the environment the code is running is in React-Native. */ // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js -export const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; +export const isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? void 0 : navigator.product) === "ReactNative"; //# sourceMappingURL=checkEnvironment.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js index fc54e7c74..28c0d5aaa 100644 --- a/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js +++ b/node_modules/@azure/core-util/dist/react-native/createAbortablePromise.js @@ -8,20 +8,20 @@ import { AbortError } from "@azure/abort-controller"; * @returns A promise that can be aborted. */ export function createAbortablePromise(buildPromise, options) { - const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options ?? {}; + const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return new Promise((resolve, reject) => { function rejectOnAbort() { - reject(new AbortError(abortErrorMsg ?? "The operation was aborted.")); + reject(new AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } function removeListeners() { - abortSignal?.removeEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? 
void 0 : abortSignal.removeEventListener("abort", onAbort); } function onAbort() { - cleanupBeforeAbort?.(); + cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); removeListeners(); rejectOnAbort(); } - if (abortSignal?.aborted) { + if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { return rejectOnAbort(); } try { @@ -36,7 +36,7 @@ export function createAbortablePromise(buildPromise, options) { catch (err) { reject(err); } - abortSignal?.addEventListener("abort", onAbort); + abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); }); } //# sourceMappingURL=createAbortablePromise.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/delay.js b/node_modules/@azure/core-util/dist/react-native/delay.js index 3e12ea5ed..b7480238c 100644 --- a/node_modules/@azure/core-util/dist/react-native/delay.js +++ b/node_modules/@azure/core-util/dist/react-native/delay.js @@ -10,13 +10,13 @@ const StandardAbortMessage = "The delay was aborted."; */ export function delay(timeInMs, options) { let token; - const { abortSignal, abortErrorMsg } = options ?? {}; + const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return createAbortablePromise((resolve) => { token = setTimeout(resolve, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, - abortErrorMsg: abortErrorMsg ?? StandardAbortMessage, + abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? 
abortErrorMsg : StandardAbortMessage, }); } //# sourceMappingURL=delay.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/dist/react-native/index.js b/node_modules/@azure/core-util/dist/react-native/index.js index 64b7b80ac..19e3b5b23 100644 --- a/node_modules/@azure/core-util/dist/react-native/index.js +++ b/node_modules/@azure/core-util/dist/react-native/index.js @@ -9,6 +9,6 @@ export { isError, getErrorMessage } from "./error.js"; export { computeSha256Hash, computeSha256Hmac } from "./sha256.js"; export { isDefined, isObjectWithProperties, objectHasProperty } from "./typeGuards.js"; export { randomUUID } from "./uuidUtils.js"; -export { isBrowser, isBun, isNode, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; +export { isBrowser, isBun, isNode, isNodeLike, isNodeRuntime, isDeno, isReactNative, isWebWorker, } from "./checkEnvironment.js"; export { uint8ArrayToString, stringToUint8Array } from "./bytesEncoding.js"; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-util/package.json b/node_modules/@azure/core-util/package.json index 7edd6968e..4d30a040d 100644 --- a/node_modules/@azure/core-util/package.json +++ b/node_modules/@azure/core-util/package.json @@ -1,6 +1,6 @@ { "name": "@azure/core-util", - "version": "1.8.1", + "version": "1.9.0", "description": "Core library for shared utility methods", "sdk-type": "client", "type": "module", @@ -87,7 +87,7 @@ "playwright": "^1.41.2", "prettier": "^3.2.5", "rimraf": "^5.0.5", - "tshy": "^1.11.1", + "tshy": "^1.13.0", "typescript": "~5.3.3", "vitest": "^1.3.1" }, diff --git a/node_modules/@azure/core-xml/dist/browser/index.js b/node_modules/@azure/core-xml/dist/browser/index.js new file mode 100644 index 000000000..e3caeef03 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/browser/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+export { stringifyXML, parseXML } from "./xml.js"; +export { XML_ATTRKEY, XML_CHARKEY } from "./xml.common.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/browser/package.json b/node_modules/@azure/core-xml/dist/browser/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/browser/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-xml/dist/browser/xml.common.js b/node_modules/@azure/core-xml/dist/browser/xml.common.js new file mode 100644 index 000000000..645f88872 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/browser/xml.common.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=xml.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/browser/xml.js b/node_modules/@azure/core-xml/dist/browser/xml.js new file mode 100644 index 000000000..ecb570165 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/browser/xml.js @@ -0,0 +1,213 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/// +import { XML_ATTRKEY, XML_CHARKEY } from "./xml.common.js"; +if (!document || !DOMParser || !Node || !XMLSerializer) { + throw new Error(`This library depends on the following DOM objects: ["document", "DOMParser", "Node", "XMLSerializer"] to parse XML, but some of these are undefined. You may provide a polyfill to make these globally available in order to support your environment. For more information, please refer to https://aka.ms/azsdk/js/web-workers. `); +} +// Policy to make our code Trusted Types compliant at running time. 
+// https://github.com/w3c/webappsec-trusted-types +// We are calling DOMParser.parseFromString() to parse XML payload from Azure services. +// The parsed DOM object is not exposed to outside. Scripts are disabled when parsing +// according to the spec. There are no HTML/XSS security concerns on the usage of +// parseFromString() here. +let ttPolicy; +try { + if (typeof self.trustedTypes !== "undefined") { + ttPolicy = self.trustedTypes.createPolicy("@azure/core-xml#xml.browser", { + createHTML: (s) => s, + }); + } +} +catch (e) { + console.warn('Could not create trusted types policy "@azure/core-xml#xml.browser"'); +} +const doc = document.implementation.createDocument(null, null, null); +const parser = new DOMParser(); +export function parseXML(str, opts = {}) { + var _a, _b, _c, _d, _e, _f; + try { + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + cdataPropName: (_d = opts.cdataPropName) !== null && _d !== void 0 ? _d : "__cdata", + stopNodes: (_e = opts.stopNodes) !== null && _e !== void 0 ? _e : [], + }; + const dom = parser.parseFromString(((_f = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML(str)) !== null && _f !== void 0 ? _f : str), "application/xml"); + throwIfError(dom); + let obj; + if (updatedOptions.includeRoot) { + obj = domToObject(dom, updatedOptions); + } + else { + obj = domToObject(dom.childNodes[0], updatedOptions); + } + return Promise.resolve(obj); + } + catch (err) { + return Promise.reject(err); + } +} +let errorNS; +function getErrorNamespace() { + var _a, _b; + if (errorNS === undefined) { + try { + const invalidXML = ((_a = ttPolicy === null || ttPolicy === void 0 ? void 0 : ttPolicy.createHTML("INVALID")) !== null && _a !== void 0 ? 
_a : "INVALID"); + errorNS = + (_b = parser.parseFromString(invalidXML, "text/xml").getElementsByTagName("parsererror")[0] + .namespaceURI) !== null && _b !== void 0 ? _b : ""; + } + catch (ignored) { + // Most browsers will return a document containing , but IE will throw. + errorNS = ""; + } + } + return errorNS; +} +function throwIfError(dom) { + const parserErrors = dom.getElementsByTagName("parsererror"); + if (parserErrors.length > 0 && getErrorNamespace()) { + for (let i = 0; i < parserErrors.length; i++) { + if (parserErrors[i].namespaceURI === errorNS) { + throw new Error(parserErrors[i].innerHTML); + } + } + } +} +function isElement(node) { + return !!node.attributes; +} +/** + * Get the Element-typed version of the provided Node if the provided node is an element with + * attributes. If it isn't, then undefined is returned. + */ +function asElementWithAttributes(node) { + return isElement(node) && node.hasAttributes() ? node : undefined; +} +function domToObject(node, options) { + var _a; + let result = {}; + const childNodeCount = node.childNodes.length; + const firstChildNode = node.childNodes[0]; + const onlyChildTextValue = (firstChildNode && + childNodeCount === 1 && + firstChildNode.nodeType === Node.TEXT_NODE && + firstChildNode.nodeValue) || + undefined; + const elementWithAttributes = asElementWithAttributes(node); + if (elementWithAttributes) { + result[XML_ATTRKEY] = {}; + for (let i = 0; i < elementWithAttributes.attributes.length; i++) { + const attr = elementWithAttributes.attributes[i]; + result[XML_ATTRKEY][attr.nodeName] = attr.nodeValue; + } + if (onlyChildTextValue) { + result[options.xmlCharKey] = onlyChildTextValue; + } + } + else if (childNodeCount === 0) { + result = ""; + } + else if (onlyChildTextValue) { + result = onlyChildTextValue; + } + if (!onlyChildTextValue) { + for (let i = 0; i < childNodeCount; i++) { + const child = node.childNodes[i]; + // Check if CData + if ((child === null || child === void 0 ? 
void 0 : child.nodeType) === Node.CDATA_SECTION_NODE) { + // Already in the CDATA + result = child.textContent; + } + else if (((_a = child === null || child === void 0 ? void 0 : child.firstChild) === null || _a === void 0 ? void 0 : _a.nodeType) === Node.CDATA_SECTION_NODE) { + // Look if child is CDATA + result[child.nodeName] = child.textContent; + } + else if (child.nodeType !== Node.TEXT_NODE) { + // Ignore leading/trailing whitespace nodes + const childObject = domToObject(child, options); + if (!result[child.nodeName]) { + result[child.nodeName] = childObject; + } + else if (Array.isArray(result[child.nodeName])) { + result[child.nodeName].push(childObject); + } + else { + result[child.nodeName] = [result[child.nodeName], childObject]; + } + } + } + } + return result; +} +const serializer = new XMLSerializer(); +export function stringifyXML(content, opts = {}) { + var _a, _b, _c, _d, _e; + const updatedOptions = { + rootName: (_a = opts.rootName) !== null && _a !== void 0 ? _a : "root", + includeRoot: (_b = opts.includeRoot) !== null && _b !== void 0 ? _b : false, + xmlCharKey: (_c = opts.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, + cdataPropName: (_d = opts.cdataPropName) !== null && _d !== void 0 ? _d : "__cdata", + stopNodes: (_e = opts.stopNodes) !== null && _e !== void 0 ? _e : [], + }; + const dom = buildNode(content, updatedOptions.rootName, updatedOptions)[0]; + return ('' + serializer.serializeToString(dom)); +} +function buildAttributes(attrs) { + const result = []; + for (const key of Object.keys(attrs)) { + const attr = doc.createAttribute(key); + attr.value = attrs[key].toString(); + result.push(attr); + } + return result; +} +function buildNode(obj, elementName, options) { + if (obj === undefined || + obj === null || + typeof obj === "string" || + typeof obj === "number" || + typeof obj === "boolean") { + const elem = doc.createElement(elementName); + elem.textContent = obj === undefined || obj === null ? 
"" : obj.toString(); + return [elem]; + } + else if (Array.isArray(obj)) { + const result = []; + for (const arrayElem of obj) { + for (const child of buildNode(arrayElem, elementName, options)) { + result.push(child); + } + } + return result; + } + else if (typeof obj === "object") { + const elem = doc.createElement(elementName); + for (const key of Object.keys(obj)) { + if (key === XML_ATTRKEY) { + for (const attr of buildAttributes(obj[key])) { + elem.attributes.setNamedItem(attr); + } + } + else if (key === options.xmlCharKey) { + elem.textContent = obj[key].toString(); + } + else if (key === options.cdataPropName) { + const cdataElement = doc.createCDATASection(obj[key].toString()); + elem.appendChild(cdataElement); + } + else { + for (const child of buildNode(obj[key], key, options)) { + elem.appendChild(child); + } + } + } + return [elem]; + } + else { + throw new Error(`Illegal value passed to buildObject: ${obj}`); + } +} +//# sourceMappingURL=xml-browser.mjs.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/commonjs/index.js b/node_modules/@azure/core-xml/dist/commonjs/index.js new file mode 100644 index 000000000..987a905d0 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/commonjs/index.js @@ -0,0 +1,12 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.parseXML = exports.stringifyXML = void 0; +var xml_js_1 = require("./xml.js"); +Object.defineProperty(exports, "stringifyXML", { enumerable: true, get: function () { return xml_js_1.stringifyXML; } }); +Object.defineProperty(exports, "parseXML", { enumerable: true, get: function () { return xml_js_1.parseXML; } }); +var xml_common_js_1 = require("./xml.common.js"); +Object.defineProperty(exports, "XML_ATTRKEY", { enumerable: true, get: function () { return xml_common_js_1.XML_ATTRKEY; } }); +Object.defineProperty(exports, "XML_CHARKEY", { enumerable: true, get: function () { return xml_common_js_1.XML_CHARKEY; } }); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/commonjs/package.json b/node_modules/@azure/core-xml/dist/commonjs/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/node_modules/@azure/core-xml/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@azure/core-xml/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/core-xml/dist/commonjs/tsdoc-metadata.json new file mode 100644 index 000000000..6305f1798 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/commonjs/tsdoc-metadata.json @@ -0,0 +1,11 @@ +// This file is read by tools that parse documentation comments conforming to the TSDoc standard. +// It should be published with your NPM package. It should not be tracked by Git. 
+{ + "tsdocVersion": "0.12", + "toolPackages": [ + { + "packageName": "@microsoft/api-extractor", + "packageVersion": "7.43.1" + } + ] +} diff --git a/node_modules/@azure/core-xml/dist/commonjs/xml.common.js b/node_modules/@azure/core-xml/dist/commonjs/xml.common.js new file mode 100644 index 000000000..9d4c7661f --- /dev/null +++ b/node_modules/@azure/core-xml/dist/commonjs/xml.common.js @@ -0,0 +1,14 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; +/** + * Default key used to access the XML attributes. + */ +exports.XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +exports.XML_CHARKEY = "_"; +//# sourceMappingURL=xml.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/commonjs/xml.js b/node_modules/@azure/core-xml/dist/commonjs/xml.js new file mode 100644 index 000000000..146a96393 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/commonjs/xml.js @@ -0,0 +1,68 @@ +"use strict"; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseXML = exports.stringifyXML = void 0; +const fast_xml_parser_1 = require("fast-xml-parser"); +const xml_common_js_1 = require("./xml.common.js"); +function getCommonOptions(options) { + var _a; + return { + attributesGroupName: xml_common_js_1.XML_ATTRKEY, + textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : xml_common_js_1.XML_CHARKEY, + ignoreAttributes: false, + suppressBooleanAttributes: false, + }; +} +function getSerializerOptions(options = {}) { + var _a, _b; + return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? 
_a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? _b : "__cdata" }); +} +function getParserOptions(options = {}) { + return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true }); +} +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the XML building of given JSON object + * `rootName` indicates the name of the root element in the resulting XML + */ +function stringifyXML(obj, opts = {}) { + const parserOptions = getSerializerOptions(opts); + const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); + const node = { [parserOptions.rootNodeName]: obj }; + const xmlData = j2x.build(node); + return `${xmlData}`.replace(/\n/g, ""); +} +exports.stringifyXML = stringifyXML; +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + * `includeRoot` indicates whether the root element is to be included or not in the output + */ +async function parseXML(str, opts = {}) { + if (!str) { + throw new Error("Document is empty"); + } + const validation = fast_xml_parser_1.XMLValidator.validate(str); + if (validation !== true) { + throw validation; + } + const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); + const parsedXml = parser.parse(str); + // Remove the node. + // This is a change in behavior on fxp v4. Issue #424 + if (parsedXml["?xml"]) { + delete parsedXml["?xml"]; + } + if (!opts.includeRoot) { + for (const key of Object.keys(parsedXml)) { + const value = parsedXml[key]; + return typeof value === "object" ? 
Object.assign({}, value) : value; + } + } + return parsedXml; +} +exports.parseXML = parseXML; +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/esm/index.js b/node_modules/@azure/core-xml/dist/esm/index.js new file mode 100644 index 000000000..e3caeef03 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/esm/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { stringifyXML, parseXML } from "./xml.js"; +export { XML_ATTRKEY, XML_CHARKEY } from "./xml.common.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/esm/package.json b/node_modules/@azure/core-xml/dist/esm/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-xml/dist/esm/xml.common.js b/node_modules/@azure/core-xml/dist/esm/xml.common.js new file mode 100644 index 000000000..645f88872 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/esm/xml.common.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. + */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=xml.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/esm/xml.js b/node_modules/@azure/core-xml/dist/esm/xml.js new file mode 100644 index 000000000..c616b462f --- /dev/null +++ b/node_modules/@azure/core-xml/dist/esm/xml.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { XMLBuilder, XMLParser, XMLValidator } from "fast-xml-parser"; +import { XML_ATTRKEY, XML_CHARKEY } from "./xml.common.js"; +function getCommonOptions(options) { + var _a; + return { + attributesGroupName: XML_ATTRKEY, + textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY, + ignoreAttributes: false, + suppressBooleanAttributes: false, + }; +} +function getSerializerOptions(options = {}) { + var _a, _b; + return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? _b : "__cdata" }); +} +function getParserOptions(options = {}) { + return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true }); +} +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the XML building of given JSON object + * `rootName` indicates the name of the root element in the resulting XML + */ +export function stringifyXML(obj, opts = {}) { + const parserOptions = getSerializerOptions(opts); + const j2x = new XMLBuilder(parserOptions); + const node = { [parserOptions.rootNodeName]: obj }; + const xmlData = j2x.build(node); + return `${xmlData}`.replace(/\n/g, ""); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + * `includeRoot` indicates whether the root element is to be included or not in the output + */ +export async function parseXML(str, opts = {}) { + if (!str) { + throw new Error("Document is empty"); + } + const validation = 
XMLValidator.validate(str); + if (validation !== true) { + throw validation; + } + const parser = new XMLParser(getParserOptions(opts)); + const parsedXml = parser.parse(str); + // Remove the node. + // This is a change in behavior on fxp v4. Issue #424 + if (parsedXml["?xml"]) { + delete parsedXml["?xml"]; + } + if (!opts.includeRoot) { + for (const key of Object.keys(parsedXml)) { + const value = parsedXml[key]; + return typeof value === "object" ? Object.assign({}, value) : value; + } + } + return parsedXml; +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/react-native/index.js b/node_modules/@azure/core-xml/dist/react-native/index.js new file mode 100644 index 000000000..e3caeef03 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/react-native/index.js @@ -0,0 +1,5 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +export { stringifyXML, parseXML } from "./xml.js"; +export { XML_ATTRKEY, XML_CHARKEY } from "./xml.common.js"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/react-native/package.json b/node_modules/@azure/core-xml/dist/react-native/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/react-native/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@azure/core-xml/dist/react-native/xml.common.js b/node_modules/@azure/core-xml/dist/react-native/xml.common.js new file mode 100644 index 000000000..645f88872 --- /dev/null +++ b/node_modules/@azure/core-xml/dist/react-native/xml.common.js @@ -0,0 +1,11 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Default key used to access the XML attributes. + */ +export const XML_ATTRKEY = "$"; +/** + * Default key used to access the XML value content. 
+ */ +export const XML_CHARKEY = "_"; +//# sourceMappingURL=xml.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/dist/react-native/xml.js b/node_modules/@azure/core-xml/dist/react-native/xml.js new file mode 100644 index 000000000..c616b462f --- /dev/null +++ b/node_modules/@azure/core-xml/dist/react-native/xml.js @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { XMLBuilder, XMLParser, XMLValidator } from "fast-xml-parser"; +import { XML_ATTRKEY, XML_CHARKEY } from "./xml.common.js"; +function getCommonOptions(options) { + var _a; + return { + attributesGroupName: XML_ATTRKEY, + textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY, + ignoreAttributes: false, + suppressBooleanAttributes: false, + }; +} +function getSerializerOptions(options = {}) { + var _a, _b; + return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? 
_b : "__cdata" }); +} +function getParserOptions(options = {}) { + return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true }); +} +/** + * Converts given JSON object to XML string + * @param obj - JSON object to be converted into XML string + * @param opts - Options that govern the XML building of given JSON object + * `rootName` indicates the name of the root element in the resulting XML + */ +export function stringifyXML(obj, opts = {}) { + const parserOptions = getSerializerOptions(opts); + const j2x = new XMLBuilder(parserOptions); + const node = { [parserOptions.rootNodeName]: obj }; + const xmlData = j2x.build(node); + return `${xmlData}`.replace(/\n/g, ""); +} +/** + * Converts given XML string into JSON + * @param str - String containing the XML content to be parsed into JSON + * @param opts - Options that govern the parsing of given xml string + * `includeRoot` indicates whether the root element is to be included or not in the output + */ +export async function parseXML(str, opts = {}) { + if (!str) { + throw new Error("Document is empty"); + } + const validation = XMLValidator.validate(str); + if (validation !== true) { + throw validation; + } + const parser = new XMLParser(getParserOptions(opts)); + const parsedXml = parser.parse(str); + // Remove the node. + // This is a change in behavior on fxp v4. Issue #424 + if (parsedXml["?xml"]) { + delete parsedXml["?xml"]; + } + if (!opts.includeRoot) { + for (const key of Object.keys(parsedXml)) { + const value = parsedXml[key]; + return typeof value === "object" ? 
Object.assign({}, value) : value; + } + } + return parsedXml; +} +//# sourceMappingURL=xml.js.map \ No newline at end of file diff --git a/node_modules/@azure/core-xml/package.json b/node_modules/@azure/core-xml/package.json new file mode 100644 index 000000000..15ee4a1b3 --- /dev/null +++ b/node_modules/@azure/core-xml/package.json @@ -0,0 +1,112 @@ +{ + "name": "@azure/core-xml", + "version": "1.4.2", + "description": "Core library for interacting with XML payloads", + "sdk-type": "client", + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "browser": "./dist/browser/index.js", + "exports": { + "./package.json": "./package.json", + ".": { + "browser": { + "types": "./dist/browser/index.d.ts", + "default": "./dist/browser/index.js" + }, + "react-native": { + "types": "./dist/react-native/index.d.ts", + "default": "./dist/react-native/index.js" + }, + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist/", + "README.md", + "LICENSE" + ], + "repository": "github:Azure/azure-sdk-for-js", + "keywords": [ + "azure", + "cloud" + ], + "author": "Microsoft Corporation", + "license": "MIT", + "bugs": { + "url": "https://github.com/Azure/azure-sdk-for-js/issues" + }, + "engines": { + "node": ">=18.0.0" + }, + "homepage": "https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/core/core-xml/", + "sideEffects": false, + "prettier": "@azure/eslint-plugin-azure-sdk/prettier.json", + "scripts": { + "build:samples": "echo Obsolete", + "build:test": "npm run clean && tshy && dev-tool run build-test", + "build": "npm run clean && tshy && api-extractor run --local", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" 
\"*.{js,json}\"", + "clean": "rimraf --glob dist dist-* temp types *.tgz *.log", + "execute:samples": "dev-tool samples run samples-dev", + "extract-api": "tshy && api-extractor run --local", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.{ts,cts,mts}\" \"test/**/*.{ts,cts,mts}\" \"*.{js,json}\"", + "integration-test:browser": "echo skipped", + "integration-test:node": "echo skipped", + "integration-test": "npm run integration-test:node && npm run integration-test:browser", + "lint:fix": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts --fix --fix-type [problem,suggestion]", + "lint": "eslint package.json api-extractor.json src test --ext .ts --ext .cts --ext .mts", + "pack": "npm pack 2>&1", + "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser && npm run integration-test:browser", + "test:node": "npm run clean && tshy && npm run unit-test:node && npm run integration-test:node", + "test": "npm run clean && tshy && npm run unit-test:node && dev-tool run build-test && npm run unit-test:browser && npm run integration-test", + "unit-test:browser": "npm run build:test && dev-tool run test:vitest --no-test-proxy --browser", + "unit-test:node": "dev-tool run test:vitest --no-test-proxy", + "unit-test": "npm run unit-test:node && npm run unit-test:browser" + }, + "dependencies": { + "tslib": "^2.6.2", + "fast-xml-parser": "^4.3.2" + }, + "devDependencies": { + "@azure/dev-tool": "^1.0.0", + "@azure/eslint-plugin-azure-sdk": "^3.0.0", + "@microsoft/api-extractor": "^7.40.3", + "@types/node": "^18.0.0", + "@types/trusted-types": "^2.0.0", + "@vitest/browser": "^1.3.1", + "@vitest/coverage-istanbul": "^1.3.1", + "eslint": "^8.56.0", + "playwright": "^1.41.2", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tshy": "^1.13.0", + "typescript": "~5.3.3", + "vitest": "^1.3.1" + }, + "//metadata": { + "migrationDate": 
"2023-03-08T18:36:03.000Z" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + }, + "dialects": [ + "esm", + "commonjs" + ], + "esmDialects": [ + "browser", + "react-native" + ], + "selfLink": false + } +} diff --git a/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json b/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json index 22735db17..6305f1798 100644 --- a/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json +++ b/node_modules/@azure/logger/dist/commonjs/tsdoc-metadata.json @@ -5,7 +5,7 @@ "toolPackages": [ { "packageName": "@microsoft/api-extractor", - "packageVersion": "7.42.3" + "packageVersion": "7.43.1" } ] } diff --git a/node_modules/@azure/logger/package.json b/node_modules/@azure/logger/package.json index 8a6059b6e..c7ac95324 100644 --- a/node_modules/@azure/logger/package.json +++ b/node_modules/@azure/logger/package.json @@ -1,7 +1,7 @@ { "name": "@azure/logger", "sdk-type": "client", - "version": "1.1.1", + "version": "1.1.2", "description": "Microsoft Azure SDK for JavaScript - Logger", "type": "module", "main": "./dist/commonjs/index.js", @@ -93,7 +93,7 @@ "playwright": "^1.41.2", "prettier": "^3.2.5", "rimraf": "^5.0.5", - "tshy": "^1.11.1", + "tshy": "^1.13.0", "typescript": "~5.3.3", "vitest": "^1.3.1" }, diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js index 1d083adca..c96e0eb81 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BatchResponseParser.js @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { HttpHeaders } from "@azure/core-http"; +import { createHttpHeaders } from "@azure/core-rest-pipeline"; +import { toHttpHeadersLike } from "@azure/core-http-compat"; import { HTTP_VERSION_1_1, HTTP_LINE_ENDING, HeaderConstants, HTTPURLConnection, } from "./utils/constants"; import { getBodyAsText } from "./BatchUtils"; import { logger } from "./log"; @@ -53,7 +54,7 @@ export class BatchResponseParser { for (let index = 0; index < subResponseCount; index++) { const subResponse = subResponses[index]; const deserializedSubResponse = {}; - deserializedSubResponse.headers = new HttpHeaders(); + deserializedSubResponse.headers = toHttpHeadersLike(createHttpHeaders()); const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js index 6de406bf9..43f254d91 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatch.js @@ -1,15 +1,20 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { BaseRequestPolicy, deserializationPolicy, generateUuid, HttpHeaders, WebResource, isTokenCredential, bearerTokenAuthenticationPolicy, isNode, } from "@azure/core-http"; -import { SpanStatusCode } from "@azure/core-tracing"; +import { randomUUID } from "@azure/core-util"; +import { isTokenCredential } from "@azure/core-auth"; +import { bearerTokenAuthenticationPolicy, createEmptyPipeline, createHttpHeaders, } from "@azure/core-rest-pipeline"; +import { isNode } from "@azure/core-util"; import { AnonymousCredential } from "./credentials/AnonymousCredential"; import { BlobClient } from "./Clients"; import { Mutex } from "./utils/Mutex"; import { Pipeline } from "./Pipeline"; -import { attachCredential, getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common"; +import { getURLPath, getURLPathAndQuery, iEqual } from "./utils/utils.common"; +import { stringifyXML } from "@azure/core-xml"; import { HeaderConstants, BATCH_MAX_REQUEST, HTTP_VERSION_1_1, HTTP_LINE_ENDING, StorageOAuthScopes, } from "./utils/constants"; import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; -import { createSpan } from "./utils/tracing"; +import { tracingClient } from "./utils/tracing"; +import { authorizeRequestOnTenantChallenge, serializationPolicy } from "@azure/core-client"; +import { storageSharedKeyCredentialPolicy } from "./policies/StorageSharedKeyCredentialPolicyV2"; /** * A BlobBatch represents an aggregated set of operations on blobs. * Currently, only `delete` and `setAccessTier` are supported. 
@@ -81,8 +86,7 @@ export class BlobBatch { if (!options) { options = {}; } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { + return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, @@ -90,17 +94,7 @@ export class BlobBatch { }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { let url; @@ -128,8 +122,7 @@ export class BlobBatch { if (!options) { options = {}; } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { + return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, @@ -137,17 +130,7 @@ export class BlobBatch { }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } } /** @@ -158,7 +141,7 @@ class InnerBatchRequest { constructor() { this.operationCount = 0; this.body = ""; - const tempGuid = generateUuid(); + const tempGuid = randomUUID(); // batch_{batchid} this.boundary = `batch_${tempGuid}`; // --batch_{batchid} @@ -179,29 +162,48 @@ class InnerBatchRequest { * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. 
If not specified, AnonymousCredential is used. */ createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] - const factories = new Array(policyFactoryLength); - factories[0] = deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer - factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers - if (!isAnonymousCreds) { - factories[2] = isTokenCredential(credential) - ? attachCredential(bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) - : credential; + const corePipeline = createEmptyPipeline(); + corePipeline.addPolicy(serializationPolicy({ + stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#", + }, + }, + }), { phase: "Serialize" }); + // Use batch header filter policy to exclude unnecessary headers + corePipeline.addPolicy(batchHeaderFilterPolicy()); + // Use batch assemble policy to assemble request and intercept request from going to wire + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if (isTokenCredential(credential)) { + corePipeline.addPolicy(bearerTokenAuthenticationPolicy({ + credential, + scopes: StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); } - factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire - return new Pipeline(factories, {}); + const 
pipeline = new Pipeline([]); + // attach the v2 pipeline to this one + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; } appendSubRequestToBody(request) { // Start to assemble sub request this.body += [ - this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - "", + this.subRequestPrefix, // sub request constant prefix + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID + "", // empty line after sub request's content ID `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + for (const [name, value] of request.headers) { + this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; } this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line // No body to assemble for current batch request support @@ -232,51 +234,34 @@ class InnerBatchRequest { return this.subRequests; } } -class BatchRequestAssemblePolicy extends BaseRequestPolicy { - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { - request: new WebResource(), - status: 200, - headers: new HttpHeaders(), - }; - this.batchRequest = batchRequest; - } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; // Intercept request from going to wire - } -} -class BatchRequestAssemblePolicyFactory { - constructor(batchRequest) { - this.batchRequest = batchRequest; - } - create(nextPolicy, options) { - return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); - } +function batchRequestAssemblePolicy(batchRequest) { + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request) { + 
batchRequest.appendSubRequestToBody(request); + return { + request, + status: 200, + headers: createHttpHeaders(), + }; + }, + }; } -class BatchHeaderFilterPolicy extends BaseRequestPolicy { - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; +function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request, next) { + let xMsHeaderName = ""; + for (const [name] of request.headers) { + if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; + } } - } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. - } - return this._nextPolicy.sendRequest(request); - } -} -class BatchHeaderFilterPolicyFactory { - create(nextPolicy, options) { - return new BatchHeaderFilterPolicy(nextPolicy, options); - } + if (xMsHeaderName !== "") { + request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. 
+ } + return next(request); + }, + }; } //# sourceMappingURL=BlobBatch.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js index f15f53dac..736070539 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobBatchClient.js @@ -3,13 +3,11 @@ import { BatchResponseParser } from "./BatchResponseParser"; import { utf8ByteLength } from "./BatchUtils"; import { BlobBatch } from "./BlobBatch"; -import { SpanStatusCode } from "@azure/core-tracing"; -import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; -import { Service, Container } from "./generated/src/operations"; +import { tracingClient } from "./utils/tracing"; import { AnonymousCredential } from "./credentials/AnonymousCredential"; -import { StorageClientContext } from "./generated/src/storageClientContext"; -import { newPipeline, isPipelineLike } from "./Pipeline"; -import { getURLPath } from "./utils/utils.common"; +import { StorageContextClient } from "./StorageContextClient"; +import { newPipeline, isPipelineLike, getCoreClientOptions, } from "./Pipeline"; +import { assertResponse, getURLPath } from "./utils/utils.common"; /** * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. * @@ -31,14 +29,14 @@ export class BlobBatchClient { else { pipeline = newPipeline(credentialOrPipeline, options); } - const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); const path = getURLPath(url); if (path && path !== "/") { // Container scoped. 
- this.serviceOrContainerContext = new Container(storageClientContext); + this.serviceOrContainerContext = storageClientContext.container; } else { - this.serviceOrContainerContext = new Service(storageClientContext); + this.serviceOrContainerContext = storageClientContext.service; } } /** @@ -117,11 +115,10 @@ export class BlobBatchClient { if (!batchRequest || batchRequest.getSubRequests().size === 0) { throw new RangeError("Batch request should contain one or more sub requests."); } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { + return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). 
const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); @@ -137,17 +134,7 @@ export class BlobBatchClient { subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } } //# sourceMappingURL=BlobBatchClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js index 2c8c0e133..144512c64 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobDownloadResponse.js @@ -1,6 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { isNode } from "@azure/core-http"; +import { isNode } from "@azure/core-util"; import { RetriableReadableStream, } from "./utils/RetriableReadableStream"; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -13,19 +13,6 @@ import { RetriableReadableStream, } from "./utils/RetriableReadableStream"; * Readable stream. */ export class BlobDownloadResponse { - /** - * Creates an instance of BlobDownloadResponse. - * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - - */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); - } /** * Indicates that the service supports * requests for partial file content. 
@@ -459,5 +446,18 @@ export class BlobDownloadResponse { get _response() { return this.originalResponse._response; } + /** + * Creates an instance of BlobDownloadResponse. + * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } } //# sourceMappingURL=BlobDownloadResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js index 796c15b78..12f2f9bed 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobLeaseClient.js @@ -1,52 +1,50 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { generateUuid } from "@azure/core-http"; -import { StorageClientContext } from "./generated/src/index"; -import { SpanStatusCode } from "@azure/core-tracing"; -import { Blob as StorageBlob, Container } from "./generated/src/operations"; +import { randomUUID } from "@azure/core-util"; import { ETagNone } from "./utils/constants"; -import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { tracingClient } from "./utils/tracing"; +import { assertResponse } from "./utils/utils.common"; /** * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ export class BlobLeaseClient { + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } /** * Creates an instance of BlobLeaseClient. 
* @param client - The client to make the lease operation requests. * @param leaseId - Initial proposed lease id. */ constructor(client, leaseId) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + const clientContext = client.storageClientContext; this._url = client.url; if (client.name === undefined) { this._isContainer = true; - this._containerOrBlobOperation = new Container(clientContext); + this._containerOrBlobOperation = clientContext.container; } else { this._isContainer = false; - this._containerOrBlobOperation = new StorageBlob(clientContext); + this._containerOrBlobOperation = clientContext.blob; } if (!leaseId) { - leaseId = generateUuid(); + leaseId = randomUUID(); } this._leaseId = leaseId; } - /** - * Gets the lease Id. - * - * @readonly - */ - get leaseId() { - return this._leaseId; - } - /** - * Gets the url. - * - * @readonly - */ - get url() { - return this._url; - } /** * Establishes and manages a lock on a container for delete operations, or on a blob * for write and delete operations. @@ -60,27 +58,23 @@ export class BlobLeaseClient { * @returns Response data for acquire lease operation. */ async acquireLease(duration, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. 
Values other than undefined or their default values are not acceptable."); } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * To change the ID of the lease. @@ -93,29 +87,23 @@ export class BlobLeaseClient { * @returns Response data for change lease operation. */ async changeLease(proposedLeaseId, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); this._leaseId = proposedLeaseId; return response; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * To free the lease if it is no longer needed so that another client may @@ -128,27 +116,21 @@ export class BlobLeaseClient { * @returns Response data for release lease operation. */ async releaseLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? 
void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * To renew the lease. @@ -160,27 +142,21 @@ export class BlobLeaseClient { * @returns Response data for renew lease operation. */ async renewLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? 
void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, + return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + var _a; + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, }); - throw e; - } - finally { - span.end(); - } + }); } /** * To end the lease but ensure that another client cannot acquire a new lease @@ -194,28 +170,23 @@ export class BlobLeaseClient { * @returns Response data for break lease operation. */ async breakLease(breakPeriod, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? 
void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + var _a; + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }; + return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); } } //# sourceMappingURL=BlobLeaseClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js index f78c5b50c..93fa07c0f 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.browser.js @@ -7,15 +7,6 @@ * parse avor data returned by blob query. 
*/ export class BlobQueryResponse { - /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - - */ - constructor(originalResponse, _options = {}) { - this.originalResponse = originalResponse; - } /** * Indicates that the service supports * requests for partial file content. @@ -358,5 +349,14 @@ export class BlobQueryResponse { get _response() { return this.originalResponse._response; } + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, _options = {}) { + this.originalResponse = originalResponse; + } } //# sourceMappingURL=BlobQueryResponse.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js index 91dac6435..b3c57648f 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobQueryResponse.js @@ -1,6 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { isNode } from "@azure/core-http"; +import { isNode } from "@azure/core-util"; import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream"; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -9,16 +9,6 @@ import { BlobQuickQueryStream } from "./utils/BlobQuickQueryStream"; * parse avor data returned by blob query. */ export class BlobQueryResponse { - /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - - */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } /** * Indicates that the service supports * requests for partial file content. 
@@ -363,5 +353,15 @@ export class BlobQueryResponse { get _response() { return this.originalResponse._response; } + /** + * Creates an instance of BlobQueryResponse. + * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } } //# sourceMappingURL=BlobQueryResponse.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js index 5095435e7..88fd14e03 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/BlobServiceClient.js @@ -1,17 +1,16 @@ import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { isTokenCredential, isNode, getDefaultProxySettings, } from "@azure/core-http"; -import { SpanStatusCode } from "@azure/core-tracing"; -import { Container, Service } from "./generated/src/operations"; +import { isTokenCredential } from "@azure/core-auth"; +import { getDefaultProxySettings } from "@azure/core-rest-pipeline"; +import { isNode } from "@azure/core-util"; import { newPipeline, isPipelineLike } from "./Pipeline"; import { ContainerClient, } from "./ContainerClient"; import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, toTags, } from "./utils/utils.common"; import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; import { AnonymousCredential } from "./credentials/AnonymousCredential"; -import "@azure/core-paging"; -import { truncatedISO8061Date } from "./utils/utils.common"; -import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; +import { truncatedISO8061Date, assertResponse } from "./utils/utils.common"; +import { tracingClient } from "./utils/tracing"; import { BlobBatchClient } from "./BlobBatchClient"; import { StorageClient } from "./StorageClient"; import { AccountSASPermissions } from "./sas/AccountSASPermissions"; @@ -22,26 +21,6 @@ import { AccountSASServices } from "./sas/AccountSASServices"; * to manipulate blob containers. */ export class BlobServiceClient extends StorageClient { - constructor(url, credentialOrPipeline, - // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
- /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } - else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || - credentialOrPipeline instanceof AnonymousCredential || - isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); - } - else { - // The second parameter is undefined. Use anonymous credential - pipeline = newPipeline(new AnonymousCredential(), options); - } - super(url, pipeline); - this.serviceContext = new Service(this.storageClientContext); - } /** * * Creates an instance of BlobServiceClient from connection string. @@ -81,6 +60,26 @@ export class BlobServiceClient extends StorageClient { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. + /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; + } /** * Creates a {@link ContainerClient} object * @@ -104,25 +103,14 @@ export class BlobServiceClient extends StorageClient { * @returns Container creation response and the corresponding container client. 
*/ async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, containerCreateResponse, }; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Deletes a Blob container. @@ -132,21 +120,10 @@ export class BlobServiceClient extends StorageClient { * @returns Container deletion response. */ async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return containerClient.delete(updatedOptions); + }); } /** * Restore a previously deleted Blob container. @@ -158,25 +135,17 @@ export class BlobServiceClient extends StorageClient { * @returns Container deletion response. */ async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); // Hack to access a protected member. 
- const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, - deletedContainerVersion }, updatedOptions)); + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = assertResponse(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, + tracingOptions: updatedOptions.tracingOptions, + })); return { containerClient, containerUndeleteResponse }; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Rename an existing Blob Container. @@ -188,25 +157,14 @@ export class BlobServiceClient extends StorageClient { /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. async renameContainer(sourceContainerName, destinationContainerName, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { + var _a; const containerClient = this.getContainerClient(destinationContainerName); // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? 
void 0 : _a.leaseId })); + const containerContext = containerClient["storageClientContext"].container; + const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); return { containerClient, containerRenameResponse }; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Gets the properties of a storage account’s Blob service, including properties @@ -217,20 +175,12 @@ export class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Properties operation. */ async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets properties for a storage account’s Blob service endpoint, including properties @@ -242,20 +192,12 @@ export class BlobServiceClient extends StorageClient { * @returns Response data for the Service Set Properties operation. 
*/ async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Retrieves statistics related to replication for the Blob service. It is only @@ -267,20 +209,12 @@ export class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Statistics operation. */ async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Get Account Information operation returns the sku name and account kind @@ -293,20 +227,12 @@ export class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Account Info operation. 
*/ async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns a list of the containers under the specified account. @@ -323,20 +249,9 @@ export class BlobServiceClient extends StorageClient { * @returns Response data for the Service List Container Segment operation. */ async listContainersSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? 
[options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags @@ -357,9 +272,14 @@ export class BlobServiceClient extends StorageClient { * @param options - Options to find blobs by tags. */ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; @@ -369,17 +289,7 @@ export class BlobServiceClient extends StorageClient { return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. @@ -397,8 +307,8 @@ export class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. 
*/ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { @@ -419,20 +329,22 @@ export class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_1, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -554,8 +466,8 @@ export class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list containers operation. 
*/ - listSegments(marker, options = {}) { - return __asyncGenerator(this, arguments, function* listSegments_1() { + listSegments(marker_1) { + return __asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listContainersSegmentResponse; if (!!marker || marker === undefined) { do { @@ -573,20 +485,22 @@ export class BlobServiceClient extends StorageClient { * * @param options - Options to list containers operation. */ - listItems(options = {}) { - return __asyncGenerator(this, arguments, function* listItems_1() { - var e_2, _a; + listItems() { + return __asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = __asyncValues(this.listSegments(marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield __await(yield* __asyncDelegator(__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -716,12 +630,14 @@ export class BlobServiceClient extends StorageClient { * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time */ async getUserDelegationKey(startsOn, expiresOn, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ + return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), expiresOn: truncatedISO8061Date(expiresOn, false), - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); const userDelegationKey = { signedObjectId: response.signedObjectId, signedTenantId: response.signedTenantId, @@ -733,17 +649,7 @@ export class BlobServiceClient extends StorageClient { }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Creates a BlobBatchClient object to conduct batch operations. diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js index 37cafbc2e..4cc0fa993 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Clients.js @@ -1,11 +1,14 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; -import { generateUuid, getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; -import { SpanStatusCode } from "@azure/core-tracing"; +import { getDefaultProxySettings, } from "@azure/core-rest-pipeline"; +import { isTokenCredential } from "@azure/core-auth"; +import { isNode } from "@azure/core-util"; +import { randomUUID } from "@azure/core-util"; import { BlobDownloadResponse } from "./BlobDownloadResponse"; import { BlobQueryResponse } from "./BlobQueryResponse"; import { AnonymousCredential } from "./credentials/AnonymousCredential"; import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; -import { AppendBlob, Blob as StorageBlob, BlockBlob, PageBlob } from "./generated/src/operations"; import { ensureCpkIfSpecified, toAccessTier, } from "./models"; import { rangeResponseFromModel, } from "./PageBlobRangeResponse"; import { newPipeline, isPipelineLike } from "./Pipeline"; @@ -15,8 +18,8 @@ import { StorageClient } from "./StorageClient"; import { Batch } from "./utils/Batch"; import { BufferScheduler } from "../../storage-common/src"; import { BlobDoesNotUseCustomerSpecifiedEncryption, BlobUsesCustomerSpecifiedEncryptionMsg, BLOCK_BLOB_MAX_BLOCKS, BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES, BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES, DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES, DEFAULT_BLOCK_BUFFER_SIZE_BYTES, DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS, ETagAny, URLConstants, } from "./utils/constants"; -import { createSpan, convertTracingToRequestOptionsBase } from "./utils/tracing"; -import { appendToURLPath, appendToURLQuery, extractConnectionStringParts, ExtractPageRangeInfoItems, generateBlockID, getURLParameter, httpAuthorizationToString, isIpEndpointStyle, parseObjectReplicationRecord, setURLParameter, toBlobTags, toBlobTagsString, toQuerySerialization, toTags, } from "./utils/utils.common"; +import { tracingClient } from "./utils/tracing"; 
+import { appendToURLPath, appendToURLQuery, assertResponse, extractConnectionStringParts, ExtractPageRangeInfoItems, generateBlockID, getURLParameter, httpAuthorizationToString, isIpEndpointStyle, parseObjectReplicationRecord, setURLParameter, toBlobTags, toBlobTagsString, toQuerySerialization, toTags, } from "./utils/utils.common"; import { fsCreateReadStream, fsStat, readStreamToLocalFile, streamToBuffer, } from "./utils/utils.node"; import { generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; import { BlobLeaseClient } from "./BlobLeaseClient"; @@ -25,6 +28,18 @@ import { BlobLeaseClient } from "./BlobLeaseClient"; * append blob, or page blob. */ export class BlobClient extends StorageClient { + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ @@ -93,22 +108,10 @@ export class BlobClient extends StorageClient { super(url, pipeline); ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new StorageBlob(this.storageClientContext); + this.blobContext = this.storageClientContext.blob; this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } - /** - * The name of the blob. - */ - get name() { - return this._name; - } - /** - * The name of the storage container the blob is associated with. - */ - get containerName() { - return this._containerName; - } /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. 
* Provide "" will remove the snapshot and return a Client to the base blob. @@ -210,15 +213,25 @@ export class BlobClient extends StorageClient { * ``` */ async download(offset = 0, count, options = {}) { - var _a; options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlobClient-download", options); - try { - const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { onDownloadProgress: isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream - }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + }, + range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately if (!isNode) { @@ -270,17 +283,7 @@ export class BlobClient extends StorageClient { maxRetryRequests: options.maxRetryRequests, onProgress: options.onProgress, }); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns true if the Azure blob resource represented by this client exists; false otherwise. @@ -292,37 +295,31 @@ export class BlobClient extends StorageClient { * @param options - options to Exists operation. 
*/ async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - // Expected exception when checking blob existence - return false; - } - else if (e.statusCode === 409 && - (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || - e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { - // Expected exception when checking blob existence + return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); return true; } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + throw e; + } + }); } /** * Returns all user-defined metadata, standard HTTP properties, and system properties @@ -337,24 +334,19 @@ export class BlobClient extends StorageClient { * @param options - Optional options to Get Properties operation. 
*/ async getProperties(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.getProperties({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted @@ -366,22 +358,17 @@ export class BlobClient extends StorageClient { * @param options - Optional options to Blob Delete operation. 
*/ async delete(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-delete", options); options.conditions = options.conditions || {}; - try { - return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.delete({ + abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted @@ -393,29 +380,19 @@ export class BlobClient extends StorageClient { * @param options - Optional options to Blob Delete operation. */ async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "BlobNotFound") { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.delete(updatedOptions)); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Restores the contents and metadata of soft deleted blob and any associated @@ -426,20 +403,12 @@ export class BlobClient extends StorageClient { * @param options - Optional options to Blob Undelete operation. */ async undelete(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-undelete", options); - try { - return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.undelete({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets system properties on the blob. 
@@ -457,23 +426,19 @@ export class BlobClient extends StorageClient { * @param options - Optional options to Blob Set HTTP Headers operation. */ async setHTTPHeaders(blobHTTPHeaders, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setHttpHeaders({ + abortSignal: options.abortSignal, + blobHttpHeaders: blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets user-defined metadata for the specified blob as one or more name-value pairs. @@ -487,23 +452,20 @@ export class BlobClient extends StorageClient { * @param options - Optional options to Set Metadata operation. 
*/ async setMetadata(metadata, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets tags on the underlying blob. 
@@ -515,21 +477,16 @@ export class BlobClient extends StorageClient { * @param options - */ async setTags(tags, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); - try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + tags: toBlobTags(tags), + })); + }); } /** * Gets the tags associated with the underlying blob. @@ -537,23 +494,17 @@ export class BlobClient extends StorageClient { * @param options - */ async getTags(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); - try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Get a {@link BlobLeaseClient} that manages leases on the blob. @@ -571,23 +522,20 @@ export class BlobClient extends StorageClient { * @param options - Optional options to the Blob Create Snapshot operation. */ async createSnapshot(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Asynchronously copies a blob to a destination within the storage account. @@ -689,20 +637,13 @@ export class BlobClient extends StorageClient { * @param options - Optional options to the Blob Abort Copy From URL operation. 
*/ async abortCopyFromURL(copyId, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); - try { - return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not @@ -713,28 +654,33 @@ export class BlobClient extends StorageClient { * @param options - */ async syncCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f, _g; + return assertResponse(await this.blobContext.copyFromURL(copySource, { + abortSignal: options.abortSignal, + metadata: options.metadata, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince, + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, + immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, + legalHold: options.legalHold, + encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets the tier on a blob. The operation is allowed on a page blob in a premium @@ -748,23 +694,19 @@ export class BlobClient extends StorageClient { * @param options - Optional options to the Blob Set Tier operation. */ async setAccessTier(tier, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); - try { - return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTier(toAccessTier(tier), { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + rehydratePriority: options.rehydratePriority, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } async downloadToBuffer(param1, param2, param3, param4 = {}) { + var _a; let buffer; let offset = 0; let count = 0; @@ -779,29 +721,26 @@ export class BlobClient extends StorageClient { count = typeof param2 === "number" ? param2 : 0; options = param3 || {}; } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); - try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? 
_a : 0; + if (blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (blockSize === 0) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { // Customer doesn't specify length, get it if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); count = response.contentLength - offset; if (count < 0) { throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); @@ -821,19 +760,19 @@ export class BlobClient extends StorageClient { } let transferProgress = 0; const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { + for (let off = offset; off < offset + count; off = off + blockSize) { batch.addOperation(async () => { // Exclusive chunk end position let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; + if (off + blockSize < chunkEnd) { + chunkEnd = off + blockSize; } const response = await this.download(off, chunkEnd - off, { abortSignal: options.abortSignal, conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + tracingOptions: 
updatedOptions.tracingOptions, }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); @@ -848,17 +787,7 @@ export class BlobClient extends StorageClient { } await batch.do(); return buffer; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -877,26 +806,15 @@ export class BlobClient extends StorageClient { * at the specified path. */ async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); - try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); if (response.readableStreamBody) { await readStreamToLocalFile(response.readableStreamBody, filePath); } // The stream is no longer accessible so setting it to undefined. 
response.blobDownloadStream = undefined; return response; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } getBlobAndContainerNamesFromUrl() { let containerName; @@ -909,11 +827,11 @@ export class BlobClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` // http://localhost:10001/devstoreaccount1/containername/blob - const parsedUrl = URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername/blob". // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -921,14 +839,14 @@ export class BlobClient extends StorageClient { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob // .getPath() -> /devstoreaccount1/containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; } else { // "https://customdomain.com/containername/blob". 
// .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -961,29 +879,32 @@ export class BlobClient extends StorageClient { * @param options - Optional options to the Blob Start Copy From URL operation. */ async startCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return assertResponse(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, sourceIfTags: options.sourceConditions.tagConditions, - }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Only available for BlobClient constructed with a shared key credential. @@ -1010,63 +931,38 @@ export class BlobClient extends StorageClient { * * @param options - Optional options to delete immutability policy on the blob. */ - async deleteImmutabilityPolicy(options) { - const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); - try { - return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? 
void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async deleteImmutabilityPolicy(options = {}) { + return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Set immutablility policy on the blob. + * Set immutability policy on the blob. * * @param options - Optional options to set immutability policy on the blob. */ - async setImmutabilityPolicy(immutabilityPolicy, options) { - const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); - try { - return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Set legal hold on the blob. * * @param options - Optional options to set legal hold on the blob. 
*/ - async setLegalHold(legalHoldEnabled, options) { - const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); - try { - return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async setLegalHold(legalHoldEnabled, options = {}) { + return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } /** @@ -1138,7 +1034,7 @@ export class AppendBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + this.appendBlobContext = this.storageClientContext.appendBlob; } /** * Creates a new AppendBlobClient object identical to the source but with the @@ -1166,23 +1062,25 @@ export class AppendBlobClient extends BlobClient { * ``` */ async create(options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. 
@@ -1192,30 +1090,20 @@ export class AppendBlobClient extends BlobClient { * @param options - */ async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Seals the append blob, making it read only. 
@@ -1223,22 +1111,17 @@ export class AppendBlobClient extends BlobClient { * @param options - */ async seal(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Commits a new block of data to the end of the existing append blob. 
@@ -1265,25 +1148,25 @@ export class AppendBlobClient extends BlobClient { * ``` */ async appendBlock(body, contentLength, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + requestOptions: { onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Append Block operation commits a new block of data to the end of an existing append blob @@ -1300,29 +1183,31 @@ export class AppendBlobClient extends BlobClient { * @param options - */ async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince, + }, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } /** @@ -1397,8 +1282,8 @@ export class BlockBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new StorageBlob(this.storageClientContext); + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; } /** * Creates a new BlockBlobClient object identical to the source but with the @@ -1442,36 +1327,31 @@ export class BlockBlobClient extends BlobClient { * @param options - */ async query(query, options = {}) { - var _a; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (!isNode) { - throw new Error("This operation currently is only supported in Node.js."); - } - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + if (!isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { queryType: "SQL", expression: query, inputSerialization: toQuerySerialization(options.inputTextConfiguration), outputSerialization: toQuerySerialization(options.outputTextConfiguration), - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, 
options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, onError: options.onError, }); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Creates a new block blob, or updates the content of an existing block blob. @@ -1501,25 +1381,29 @@ export class BlockBlobClient extends BlobClient { * ``` */ async upload(body, contentLength, options = {}) { - var _a, _b, _c; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { onUploadProgress: options.onProgress, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Creates a new Block Blob where the contents of the blob are read from a given URL. 
@@ -1540,29 +1424,18 @@ export class BlockBlobClient extends BlobClient { * @param options - Optional parameters. */ async syncUploadFromURL(sourceURL, options = {}) { - var _a, _b, _c, _d, _e; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f; + return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * Uploads the specified block to the block blob's "staging area" to be later @@ -1576,23 +1449,21 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Stage Block operation. */ async stageBlock(blockId, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + 
tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Stage Block From URL operation creates a new block to be committed as part @@ -1616,21 +1487,20 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Stage Block From URL operation. */ async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Writes a blob by specifying the list of block IDs that make up the blob. @@ -1645,23 +1515,26 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Commit Block List operation. */ async commitBlockList(blocks, options = {}) { - var _a, _b, _c; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns the list of blocks that have been uploaded as part of a block blob @@ -1674,10 +1547,14 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Get Block List operation. 
*/ async getBlockList(listType, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); if (!res.committedBlocks) { res.committedBlocks = []; } @@ -1685,17 +1562,7 @@ export class BlockBlobClient extends BlobClient { res.uncommittedBlocks = []; } return res; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } // High level functions /** @@ -1714,8 +1581,7 @@ export class BlockBlobClient extends BlobClient { * @param options - */ async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { + return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { if (isNode) { let buffer; if (data instanceof Buffer) { @@ -1734,17 +1600,7 @@ export class BlockBlobClient extends BlobClient { const browserBlob = new Blob([data]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, 
updatedOptions); } - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * ONLY AVAILABLE IN BROWSERS. @@ -1766,21 +1622,10 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Blob Upload operation. */ async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { + return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); } /** * @@ -1798,27 +1643,23 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Blob Upload operation. */ async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + var _a, _b; + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? 
_a : 0; + if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || - options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } - if (options.blockSize === 0) { + if (blockSize === 0) { if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`${size} is too larger to upload to a block blob.`); } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } } @@ -1828,25 +1669,24 @@ export class BlockBlobClient extends BlobClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); + return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); } - const numBlocks = Math.floor((size - 
1) / options.blockSize) + 1; + const numBlocks = Math.floor((size - 1) / blockSize) + 1; if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); } const blockList = []; - const blockIDPrefix = generateUuid(); + const blockIDPrefix = randomUUID(); let transferProgress = 0; const batch = new Batch(options.concurrency); for (let i = 0; i < numBlocks; i++) { batch.addOperation(async () => { const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? size : start + options.blockSize; + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; const contentLength = end - start; blockList.push(blockID); await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { @@ -1867,17 +1707,7 @@ export class BlockBlobClient extends BlobClient { } await batch.do(); return this.commitBlockList(blockList, updatedOptions); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -1893,27 +1723,16 @@ export class BlockBlobClient extends BlobClient { * @returns Response data for the Blob Upload operation. */ async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); - try { + return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { + return this.uploadSeekableInternal((offset, count) => { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? 
offset + count - 1 : Infinity, start: offset, }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -1938,10 +1757,9 @@ export class BlockBlobClient extends BlobClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); - try { + return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { let blockNum = 0; - const blockIDPrefix = generateUuid(); + const blockIDPrefix = randomUUID(); let transferProgress = 0; const blockList = []; const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { @@ -1965,18 +1783,8 @@ export class BlockBlobClient extends BlobClient { // Outgoing queue shouldn't be empty. 
Math.ceil((maxConcurrency / 4) * 3)); await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + }); } } /** @@ -2048,7 +1856,7 @@ export class PageBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); + this.pageBlobContext = this.storageClientContext.pageBlob; } /** * Creates a new PageBlobClient object identical to the source but with the @@ -2071,23 +1879,27 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Create operation. */ async create(size, options = {}) { - var _a, _b, _c; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? 
void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.pageBlobContext.create(0, size, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Creates a page blob of the specified length. 
Call uploadPages to upload data @@ -2099,30 +1911,20 @@ export class PageBlobClient extends BlobClient { * @param options - */ async createIfNotExists(size, options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); - try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. 
@@ -2135,25 +1937,26 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Upload Pages operation. */ async uploadPages(body, offset, count, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.uploadPages(count, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + requestOptions: { onUploadProgress: options.onProgress, - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Upload Pages operation writes a range of pages to a page blob where the @@ -2167,29 +1970,30 @@ export class PageBlobClient extends BlobClient { * @param options - */ async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { - var _a; options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = 
options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Frees the specified pages from the page blob. @@ -2201,22 +2005,20 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Clear Pages operation. */ async clearPages(offset = 0, count, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); - try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.clearPages(0, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns the list of valid page ranges for a page blob or snapshot of a page blob. @@ -2228,24 +2030,18 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Ranges operation. */ async getPageRanges(offset = 0, count, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); - try { - return await this.pageBlobContext - .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } /** * getPageRangesSegment returns a single segment of page ranges starting from the @@ -2260,21 +2056,18 @@ export class PageBlobClient extends BlobClient { * @param options - Options to PageBlob Get Page Ranges Segment operation. */ async listPageRangesSegment(offset = 0, count, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); - try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + marker: marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} @@ -2290,8 +2083,8 @@ export class PageBlobClient extends BlobClient { * items. 
The marker value is opaque to the client. * @param options - Options to List Page Ranges operation. */ - listPageRangeItemSegments(offset = 0, count, marker, options = {}) { - return __asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + listPageRangeItemSegments() { + return __asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { let getPageRangeItemSegmentsResponse; if (!!marker || marker === undefined) { do { @@ -2309,20 +2102,22 @@ export class PageBlobClient extends BlobClient { * @param count - Number of bytes to get. * @param options - Options to List Page Ranges operation. */ - listPageRangeItems(offset = 0, count, options = {}) { - return __asyncGenerator(this, arguments, function* listPageRangeItems_1() { - var e_1, _a; + listPageRangeItems() { + return __asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = __asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; + for (var _d = true, _e = __asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -2435,24 +2230,19 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + var _a; + const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshot, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(result); + }); } /** * getPageRangesDiffSegment returns a single segment of page ranges starting from the @@ -2468,25 +2258,23 @@ export class PageBlobClient extends BlobClient { * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
*/ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, + leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshotOrUrl, + range: rangeToString({ offset: offset, count: count, - }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }), + marker: marker, + maxPageSize: options === null || options === void 0 ? 
void 0 : options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} @@ -2526,18 +2314,20 @@ export class PageBlobClient extends BlobClient { */ listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { return __asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { - var e_2, _a; + var _a, e_2, _b, _c; let marker; try { - for (var _b = __asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; + for (var _d = true, _e = __asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; yield __await(yield* __asyncDelegator(__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -2651,24 +2441,19 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Page Range Diff operation. */ async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevSnapshotUrl, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } /** * Resizes the page blob to the specified size (which must be a multiple of 512). @@ -2679,22 +2464,17 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Resize operation. */ async resize(size, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets a page blob's sequence number. @@ -2706,22 +2486,17 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Update Sequence Number operation. */ async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); - try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + abortSignal: options.abortSignal, + blobSequenceNumber: sequenceNumber, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. @@ -2737,21 +2512,14 @@ export class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Copy Incremental operation. */ async startCopyIncremental(copySource, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.copyIncremental(copySource, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } //# sourceMappingURL=Clients.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js index e6bd251a7..45925c6e9 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/ContainerClient.js @@ -1,13 +1,13 @@ import { __asyncDelegator, __asyncGenerator, __asyncValues, __await } from "tslib"; -import { getDefaultProxySettings, isNode, isTokenCredential, URLBuilder, } from "@azure/core-http"; -import { SpanStatusCode } from "@azure/core-tracing"; +import { getDefaultProxySettings, } from "@azure/core-rest-pipeline"; +import { isNode } from "@azure/core-util"; +import { isTokenCredential } from "@azure/core-auth"; import { AnonymousCredential } from "./credentials/AnonymousCredential"; import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; -import { Container } from "./generated/src/operations"; import { newPipeline, isPipelineLike } from "./Pipeline"; import { StorageClient } from "./StorageClient"; -import { convertTracingToRequestOptionsBase, createSpan } from "./utils/tracing"; -import { 
appendToURLPath, appendToURLQuery, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, EscapePath, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, toTags, truncatedISO8061Date, } from "./utils/utils.common"; +import { tracingClient } from "./utils/tracing"; +import { appendToURLPath, appendToURLQuery, assertResponse, BlobNameToString, ConvertInternalResponseOfListBlobFlat, ConvertInternalResponseOfListBlobHierarchy, EscapePath, extractConnectionStringParts, isIpEndpointStyle, parseObjectReplicationRecord, toTags, truncatedISO8061Date, } from "./utils/utils.common"; import { generateBlobSASQueryParameters } from "./sas/BlobSASSignatureValues"; import { BlobLeaseClient } from "./BlobLeaseClient"; import { AppendBlobClient, BlobClient, BlockBlobClient, PageBlobClient, } from "./Clients"; @@ -16,6 +16,12 @@ import { BlobBatchClient } from "./BlobBatchClient"; * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ export class ContainerClient extends StorageClient { + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ @@ -76,13 +82,7 @@ export class ContainerClient extends StorageClient { } super(url, pipeline); this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); - } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; + this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. 
If the container with @@ -102,22 +102,9 @@ export class ContainerClient extends StorageClient { * ``` */ async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - // Spread operator in destructuring assignments, - // this will filter out unwanted properties from the response object into result object - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.create(updatedOptions)); + }); } /** * Creates a new container under the specified account. If the container with @@ -128,29 +115,21 @@ export class ContainerClient extends StorageClient { * @param options - */ async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + else { + throw e; + } + } + }); } /** * Returns true if the Azure container resource represented by this client exists; false otherwise. @@ -162,31 +141,21 @@ export class ContainerClient extends StorageClient { * @param options - */ async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: "Expected exception when checking container existence", + return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, }); - return false; + return true; } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); } /** * Creates a {@link BlobClient} @@ -247,20 +216,9 @@ export class ContainerClient extends StorageClient { if (!options.conditions) { 
options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * Marks the specified container for deletion. The container and any blobs @@ -273,20 +231,14 @@ export class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Marks the specified container for deletion if it exists. 
The container and any blobs @@ -296,29 +248,19 @@ export class ContainerClient extends StorageClient { * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Sets one or more user-defined name-value pairs for the specified container. 
@@ -339,20 +281,15 @@ export class ContainerClient extends StorageClient { if (options.conditions.ifUnmodifiedSince) { throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); - try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Gets the permissions for the specified container. 
The permissions indicate @@ -369,9 +306,12 @@ export class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.getAccessPolicy({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); const res = { _response: response._response, blobPublicAccess: response.blobPublicAccess, @@ -403,17 +343,7 @@ export class ContainerClient extends StorageClient { }); } return res; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Sets the permissions for the specified container. 
The permissions indicate @@ -434,8 +364,7 @@ export class ContainerClient extends StorageClient { */ async setAccessPolicy(access, containerAcl, options = {}) { options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { + return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; for (const identifier of containerAcl || []) { acl.push({ @@ -451,18 +380,15 @@ export class ContainerClient extends StorageClient { id: identifier.id, }); } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return assertResponse(await this.containerContext.setAccessPolicy({ + abortSignal: options.abortSignal, + access, + containerAcl: acl, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Get a {@link BlobLeaseClient} that manages leases on the container. @@ -496,25 +422,14 @@ export class ContainerClient extends StorageClient { * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. 
*/ async uploadBlockBlob(blobName, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { + return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, response, }; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted @@ -528,24 +443,13 @@ export class ContainerClient extends StorageClient { * @returns Block blob deletion response data. */ async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { + return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) { blobClient = blobClient.withVersion(options.versionId); } - return await blobClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return blobClient.delete(updatedOptions); + }); } /** * listBlobFlatSegment returns a single segment of blobs starting from the @@ -558,25 +462,14 @@ export class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Flat Segment operation. 
*/ async listBlobFlatSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, 
- }); - throw e; - } - finally { - span.end(); - } + }); } /** * listBlobHierarchySegment returns a single segment of blobs starting from @@ -590,29 +483,18 @@ export class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. */ async listBlobHierarchySegment(delimiter, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = 
Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }) }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse @@ -626,8 +508,8 @@ export class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listSegments(marker, options = {}) { - return __asyncGenerator(this, arguments, function* listSegments_1() { + listSegments(marker_1) { + return __asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listBlobsFlatSegmentResponse; if (!!marker || marker === undefined) { do { @@ -643,20 +525,22 @@ export class ContainerClient extends StorageClient { * * @param options - Options to list blobs operation. 
*/ - listItems(options = {}) { - return __asyncGenerator(this, arguments, function* listItems_1() { - var e_1, _a; + listItems() { + return __asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = __asyncValues(this.listSegments(marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const listBlobsFlatSegmentResponse = _c.value; + for (var _d = true, _e = __asyncValues(this.listSegments(marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsFlatSegmentResponse = _c; yield __await(yield* __asyncDelegator(__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -804,8 +688,8 @@ export class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listHierarchySegments(delimiter, marker, options = {}) { - return __asyncGenerator(this, arguments, function* listHierarchySegments_1() { + listHierarchySegments(delimiter_1, marker_1) { + return __asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) { let listBlobsHierarchySegmentResponse; if (!!marker || marker === undefined) { do { @@ -822,13 +706,15 @@ export class ContainerClient extends StorageClient { * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. 
*/ - listItemsByHierarchy(delimiter, options = {}) { - return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { - var e_2, _a; + listItemsByHierarchy(delimiter_1) { + return __asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const listBlobsHierarchySegmentResponse = _c.value; + for (var _d = true, _e = __asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsHierarchySegmentResponse = _c; const segment = listBlobsHierarchySegmentResponse.segment; if (segment.blobPrefixes) { for (const prefix of segment.blobPrefixes) { @@ -843,7 +729,7 @@ export class ContainerClient extends StorageClient { catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -1007,9 +893,14 @@ export class ContainerClient extends StorageClient { * @param options - Options to find blobs by tags. 
*/ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); - try { - const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; @@ -1019,17 +910,7 @@ export class ContainerClient extends StorageClient { return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. @@ -1047,8 +928,8 @@ export class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. 
*/ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { @@ -1069,20 +950,22 @@ export class ContainerClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_3, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return __asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_3, _b, _c; let marker; try { - for (var _b = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield __await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = __asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield __await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield __await(yield* __asyncDelegator(__asyncValues(segment.blobs))); } } catch (e_3_1) { e_3 = { error: e_3_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield __await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield __await(_b.call(_e)); } finally { if (e_3) throw e_3.error; } } @@ -1198,23 +1081,23 @@ export class ContainerClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` // 
http://localhost:10001/devstoreaccount1/containername - const parsedUrl = URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername". // "https://customdomain.com/containername". // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; + containerName = parsedUrl.pathname.split("/")[1]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername // .getPath() -> /devstoreaccount1/containername - containerName = parsedUrl.getPath().split("/")[2]; + containerName = parsedUrl.pathname.split("/")[2]; } else { // "https://customdomain.com/containername". // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; + containerName = parsedUrl.pathname.split("/")[1]; } // decode the encoded containerName - to get all the special characters that might be present in it containerName = decodeURIComponent(containerName); diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js index bbb977ae9..262b2da70 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/Pipeline.js @@ -1,18 +1,24 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { BaseRequestPolicy, deserializationPolicy, disableResponseDecompressionPolicy, HttpHeaders, RequestPolicyOptions, WebResource, proxyPolicy, isNode, isTokenCredential, tracingPolicy, logPolicy, keepAlivePolicy, generateClientRequestIdPolicy, } from "@azure/core-http"; +import { __rest } from "tslib"; +import { convertHttpClient, createRequestPolicyFactoryPolicy, } from "@azure/core-http-compat"; +import { bearerTokenAuthenticationPolicy, decompressResponsePolicyName, } from "@azure/core-rest-pipeline"; +import { authorizeRequestOnTenantChallenge, createClientPipeline } from "@azure/core-client"; +import { parseXML, stringifyXML } from "@azure/core-xml"; +import { isTokenCredential } from "@azure/core-auth"; import { logger } from "./log"; -import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory"; import { StorageRetryPolicyFactory } from "./StorageRetryPolicyFactory"; +import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; import { AnonymousCredential } from "./credentials/AnonymousCredential"; -import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, } from "./utils/constants"; -import { TelemetryPolicyFactory } from "./TelemetryPolicyFactory"; +import { StorageOAuthScopes, StorageBlobLoggingAllowedHeaderNames, StorageBlobLoggingAllowedQueryParameters, SDK_VERSION, } from "./utils/constants"; import { getCachedDefaultHttpClient } from "./utils/cache"; -import { attachCredential } from "./utils/utils.common"; -import { storageBearerTokenChallengeAuthenticationPolicy } from "./policies/StorageBearerTokenChallengeAuthenticationPolicy"; +import { storageBrowserPolicy } from "./policies/StorageBrowserPolicyV2"; +import { storageRetryPolicy } from "./policies/StorageRetryPolicyV2"; +import { storageSharedKeyCredentialPolicy } from "./policies/StorageSharedKeyCredentialPolicyV2"; +import { StorageBrowserPolicyFactory } from "./StorageBrowserPolicyFactory"; // 
Export following interfaces and types for customers who want to implement their // own RequestPolicy or HTTPClient -export { BaseRequestPolicy, StorageOAuthScopes, deserializationPolicy, HttpHeaders, WebResource, RequestPolicyOptions, }; +export { StorageOAuthScopes, }; /** * A helper to decide if a given argument satisfies the Pipeline contract * @param pipeline - An argument that may be a Pipeline @@ -44,9 +50,7 @@ export class Pipeline { */ constructor(factories, options = {}) { this.factories = factories; - // when options.httpClient is not specified, passing in a DefaultHttpClient instance to - // avoid each client creating its own http client. - this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + this.options = options; } /** * Transfer Pipeline object to ServiceClientOptions object which is required by @@ -69,39 +73,187 @@ export class Pipeline { * @returns A new Pipeline object. */ export function newPipeline(credential, pipelineOptions = {}) { - var _a; - if (credential === undefined) { + if (!credential) { credential = new AnonymousCredential(); } - // Order is important. Closer to the API at the top & closer to the network at the bottom. 
- // The credential's policy factory must appear close to the wire so it can sign any - // changes made by other factories (like UniqueRequestIDPolicyFactory) - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ - tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), - keepAlivePolicy(pipelineOptions.keepAliveOptions), - telemetryPolicy, - generateClientRequestIdPolicy(), - new StorageBrowserPolicyFactory(), - new StorageRetryPolicyFactory(pipelineOptions.retryOptions), - // Default deserializationPolicy is provided by protocol layer - // Use customized XML char key of "#" so we could deserialize metadata - // with "_" key - deserializationPolicy(undefined, { xmlCharKey: "#" }), - logPolicy({ - logger: logger.info, - allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, - }), + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; +} +function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory, ]; - if (isNode) { - // policies only available in Node.js runtime, not in browsers - factories.push(proxyPolicy(pipelineOptions.proxyOptions)); - factories.push(disableResponseDecompressionPolicy()); + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); + }); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + // if there are any left over, wrap in a requestPolicyFactoryPolicy + return { + wrappedPolicies: createRequestPolicyFactoryPolicy(novelFactories), + 
afterRetry: hasInjector, + }; + } + } + return undefined; +} +export function getCoreClientOptions(pipeline) { + var _a; + const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = __rest(_b, ["httpClient"]); + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix + ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + corePipeline = createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { + additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + logger: logger.info, + }, userAgentOptions: { + userAgentPrefix, + }, serializationOptions: { + stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + }, deserializationOptions: { + parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + } })); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: decompressResponsePolicyName }); + corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy(storageBrowserPolicy()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? 
{ afterPhase: "Retry" } : undefined); + } + const credential = getCredentialFromPipeline(pipeline); + if (isTokenCredential(credential)) { + corePipeline.addPolicy(bearerTokenAuthenticationPolicy({ + credential, + scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + pipeline._corePipeline = corePipeline; + } + return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); +} +export function getCredentialFromPipeline(pipeline) { + // see if we squirreled one away on the type itself + if (pipeline._credential) { + return pipeline._credential; + } + // if it came from another package, loop over the factories and look for one like before + let credential = new AnonymousCredential(); + for (const factory of pipeline.factories) { + if (isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. 
+ credential = factory.credential; + } + else if (isStorageSharedKeyCredential(factory)) { + return factory; + } + } + return credential; +} +function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential) { + return true; + } + return factory.constructor.name === "StorageSharedKeyCredential"; +} +function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential) { + return true; + } + return factory.constructor.name === "AnonymousCredential"; +} +function isCoreHttpBearerTokenFactory(factory) { + return isTokenCredential(factory.credential); +} +function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory) { + return true; } - factories.push(isTokenCredential(credential) - ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) - : credential); - return new Pipeline(factories, pipelineOptions); + return factory.constructor.name === "StorageBrowserPolicyFactory"; +} +function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageRetryPolicyFactory"; +} +function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; +} +function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; +} +function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy", + ]; + const mockHttpClient = { + sendRequest: async (request) => { + return { + request, + headers: request.headers.clone(), + status: 500, + }; + }, + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do 
nothing */ + }, + shouldLog(_logLevel) { + return false; + }, + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = policyInstance.constructor.name; + // bundlers sometimes add a custom suffix to the class name to make it unique + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); + }); } //# sourceMappingURL=Pipeline.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js index c36e4aceb..c8ca78a2a 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageClient.js @@ -1,10 +1,8 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { StorageClientContext } from "./generated/src/storageClientContext"; +import { StorageContextClient } from "./StorageContextClient"; +import { getCoreClientOptions, getCredentialFromPipeline } from "./Pipeline"; import { escapeURLPath, getURLScheme, iEqual, getAccountNameFromUrl } from "./utils/utils.common"; -import { AnonymousCredential } from "./credentials/AnonymousCredential"; -import { StorageSharedKeyCredential } from "./credentials/StorageSharedKeyCredential"; -import { isTokenCredential, isNode } from "@azure/core-http"; /** * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} * and etc. 
@@ -20,20 +18,9 @@ export class StorageClient { this.url = escapeURLPath(url); this.accountName = getAccountNameFromUrl(url); this.pipeline = pipeline; - this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { - if ((isNode && factory instanceof StorageSharedKeyCredential) || - factory instanceof AnonymousCredential) { - this.credential = factory; - } - else if (isTokenCredential(factory.credential)) { - // Only works if the factory has been attached a "credential" property. - // We do that in newPipeline() when using TokenCredential. - this.credential = factory.credential; - } - } + this.credential = getCredentialFromPipeline(pipeline); // Override protocol layer's default content-type const storageClientContext = this.storageClientContext; storageClientContext.requestContentType = undefined; diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageContextClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageContextClient.js new file mode 100644 index 000000000..08c4b5dc9 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/StorageContextClient.js @@ -0,0 +1,17 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { StorageClient } from "./generated/src"; +/** + * @internal + */ +export class StorageContextClient extends StorageClient { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = Object.assign({}, operationSpec); + if (operationSpecToSend.path === "/{containerName}" || + operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; + } + return super.sendOperationRequest(operationArguments, operationSpecToSend); + } +} +//# sourceMappingURL=StorageContextClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js deleted file mode 100644 index a2e71a3b7..000000000 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/TelemetryPolicyFactory.js +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { isNode, } from "@azure/core-http"; -import * as os from "os"; -import { TelemetryPolicy } from "./policies/TelemetryPolicy"; -import { SDK_VERSION } from "./utils/constants"; -/** - * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. - */ -export class TelemetryPolicyFactory { - /** - * Creates an instance of TelemetryPolicyFactory. - * @param telemetry - - */ - constructor(telemetry) { - const userAgentInfo = []; - if (isNode) { - if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; - if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { - userAgentInfo.push(telemetryString); - } - } - // e.g. azsdk-js-storageblob/10.0.0 - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; - if (userAgentInfo.indexOf(libInfo) === -1) { - userAgentInfo.push(libInfo); - } - // e.g. 
(NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - let runtimeInfo = `(NODE-VERSION ${process.version})`; - if (os) { - runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; - } - if (userAgentInfo.indexOf(runtimeInfo) === -1) { - userAgentInfo.push(runtimeInfo); - } - } - this.telemetryString = userAgentInfo.join(" "); - } - /** - * Creates a TelemetryPolicy object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new TelemetryPolicy(nextPolicy, options, this.telemetryString); - } -} -//# sourceMappingURL=TelemetryPolicyFactory.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js index 290e550ab..0fb1eaa7a 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/index.js @@ -7,5 +7,5 @@ */ export * from "./models"; export { StorageClient } from "./storageClient"; -export { StorageClientContext } from "./storageClientContext"; +export * from "./operationsInterfaces"; //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js index 959334483..226a4e6ab 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/index.js @@ -5,5 +5,252 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -export {}; +/** Known values of {@link EncryptionAlgorithmType} that the service accepts. 
*/ +export var KnownEncryptionAlgorithmType; +(function (KnownEncryptionAlgorithmType) { + /** AES256 */ + KnownEncryptionAlgorithmType["AES256"] = "AES256"; +})(KnownEncryptionAlgorithmType || (KnownEncryptionAlgorithmType = {})); +/** Known values of {@link BlobExpiryOptions} that the service accepts. */ +export var KnownBlobExpiryOptions; +(function (KnownBlobExpiryOptions) { + /** NeverExpire */ + KnownBlobExpiryOptions["NeverExpire"] = "NeverExpire"; + /** RelativeToCreation */ + KnownBlobExpiryOptions["RelativeToCreation"] = "RelativeToCreation"; + /** RelativeToNow */ + KnownBlobExpiryOptions["RelativeToNow"] = "RelativeToNow"; + /** Absolute */ + KnownBlobExpiryOptions["Absolute"] = "Absolute"; +})(KnownBlobExpiryOptions || (KnownBlobExpiryOptions = {})); +/** Known values of {@link StorageErrorCode} that the service accepts. */ +export var KnownStorageErrorCode; +(function (KnownStorageErrorCode) { + /** AccountAlreadyExists */ + KnownStorageErrorCode["AccountAlreadyExists"] = "AccountAlreadyExists"; + /** AccountBeingCreated */ + KnownStorageErrorCode["AccountBeingCreated"] = "AccountBeingCreated"; + /** AccountIsDisabled */ + KnownStorageErrorCode["AccountIsDisabled"] = "AccountIsDisabled"; + /** AuthenticationFailed */ + KnownStorageErrorCode["AuthenticationFailed"] = "AuthenticationFailed"; + /** AuthorizationFailure */ + KnownStorageErrorCode["AuthorizationFailure"] = "AuthorizationFailure"; + /** ConditionHeadersNotSupported */ + KnownStorageErrorCode["ConditionHeadersNotSupported"] = "ConditionHeadersNotSupported"; + /** ConditionNotMet */ + KnownStorageErrorCode["ConditionNotMet"] = "ConditionNotMet"; + /** EmptyMetadataKey */ + KnownStorageErrorCode["EmptyMetadataKey"] = "EmptyMetadataKey"; + /** InsufficientAccountPermissions */ + KnownStorageErrorCode["InsufficientAccountPermissions"] = "InsufficientAccountPermissions"; + /** InternalError */ + KnownStorageErrorCode["InternalError"] = "InternalError"; + /** InvalidAuthenticationInfo */ + 
KnownStorageErrorCode["InvalidAuthenticationInfo"] = "InvalidAuthenticationInfo"; + /** InvalidHeaderValue */ + KnownStorageErrorCode["InvalidHeaderValue"] = "InvalidHeaderValue"; + /** InvalidHttpVerb */ + KnownStorageErrorCode["InvalidHttpVerb"] = "InvalidHttpVerb"; + /** InvalidInput */ + KnownStorageErrorCode["InvalidInput"] = "InvalidInput"; + /** InvalidMd5 */ + KnownStorageErrorCode["InvalidMd5"] = "InvalidMd5"; + /** InvalidMetadata */ + KnownStorageErrorCode["InvalidMetadata"] = "InvalidMetadata"; + /** InvalidQueryParameterValue */ + KnownStorageErrorCode["InvalidQueryParameterValue"] = "InvalidQueryParameterValue"; + /** InvalidRange */ + KnownStorageErrorCode["InvalidRange"] = "InvalidRange"; + /** InvalidResourceName */ + KnownStorageErrorCode["InvalidResourceName"] = "InvalidResourceName"; + /** InvalidUri */ + KnownStorageErrorCode["InvalidUri"] = "InvalidUri"; + /** InvalidXmlDocument */ + KnownStorageErrorCode["InvalidXmlDocument"] = "InvalidXmlDocument"; + /** InvalidXmlNodeValue */ + KnownStorageErrorCode["InvalidXmlNodeValue"] = "InvalidXmlNodeValue"; + /** Md5Mismatch */ + KnownStorageErrorCode["Md5Mismatch"] = "Md5Mismatch"; + /** MetadataTooLarge */ + KnownStorageErrorCode["MetadataTooLarge"] = "MetadataTooLarge"; + /** MissingContentLengthHeader */ + KnownStorageErrorCode["MissingContentLengthHeader"] = "MissingContentLengthHeader"; + /** MissingRequiredQueryParameter */ + KnownStorageErrorCode["MissingRequiredQueryParameter"] = "MissingRequiredQueryParameter"; + /** MissingRequiredHeader */ + KnownStorageErrorCode["MissingRequiredHeader"] = "MissingRequiredHeader"; + /** MissingRequiredXmlNode */ + KnownStorageErrorCode["MissingRequiredXmlNode"] = "MissingRequiredXmlNode"; + /** MultipleConditionHeadersNotSupported */ + KnownStorageErrorCode["MultipleConditionHeadersNotSupported"] = "MultipleConditionHeadersNotSupported"; + /** OperationTimedOut */ + KnownStorageErrorCode["OperationTimedOut"] = "OperationTimedOut"; + /** OutOfRangeInput */ 
+ KnownStorageErrorCode["OutOfRangeInput"] = "OutOfRangeInput"; + /** OutOfRangeQueryParameterValue */ + KnownStorageErrorCode["OutOfRangeQueryParameterValue"] = "OutOfRangeQueryParameterValue"; + /** RequestBodyTooLarge */ + KnownStorageErrorCode["RequestBodyTooLarge"] = "RequestBodyTooLarge"; + /** ResourceTypeMismatch */ + KnownStorageErrorCode["ResourceTypeMismatch"] = "ResourceTypeMismatch"; + /** RequestUrlFailedToParse */ + KnownStorageErrorCode["RequestUrlFailedToParse"] = "RequestUrlFailedToParse"; + /** ResourceAlreadyExists */ + KnownStorageErrorCode["ResourceAlreadyExists"] = "ResourceAlreadyExists"; + /** ResourceNotFound */ + KnownStorageErrorCode["ResourceNotFound"] = "ResourceNotFound"; + /** ServerBusy */ + KnownStorageErrorCode["ServerBusy"] = "ServerBusy"; + /** UnsupportedHeader */ + KnownStorageErrorCode["UnsupportedHeader"] = "UnsupportedHeader"; + /** UnsupportedXmlNode */ + KnownStorageErrorCode["UnsupportedXmlNode"] = "UnsupportedXmlNode"; + /** UnsupportedQueryParameter */ + KnownStorageErrorCode["UnsupportedQueryParameter"] = "UnsupportedQueryParameter"; + /** UnsupportedHttpVerb */ + KnownStorageErrorCode["UnsupportedHttpVerb"] = "UnsupportedHttpVerb"; + /** AppendPositionConditionNotMet */ + KnownStorageErrorCode["AppendPositionConditionNotMet"] = "AppendPositionConditionNotMet"; + /** BlobAlreadyExists */ + KnownStorageErrorCode["BlobAlreadyExists"] = "BlobAlreadyExists"; + /** BlobImmutableDueToPolicy */ + KnownStorageErrorCode["BlobImmutableDueToPolicy"] = "BlobImmutableDueToPolicy"; + /** BlobNotFound */ + KnownStorageErrorCode["BlobNotFound"] = "BlobNotFound"; + /** BlobOverwritten */ + KnownStorageErrorCode["BlobOverwritten"] = "BlobOverwritten"; + /** BlobTierInadequateForContentLength */ + KnownStorageErrorCode["BlobTierInadequateForContentLength"] = "BlobTierInadequateForContentLength"; + /** BlobUsesCustomerSpecifiedEncryption */ + KnownStorageErrorCode["BlobUsesCustomerSpecifiedEncryption"] = 
"BlobUsesCustomerSpecifiedEncryption"; + /** BlockCountExceedsLimit */ + KnownStorageErrorCode["BlockCountExceedsLimit"] = "BlockCountExceedsLimit"; + /** BlockListTooLong */ + KnownStorageErrorCode["BlockListTooLong"] = "BlockListTooLong"; + /** CannotChangeToLowerTier */ + KnownStorageErrorCode["CannotChangeToLowerTier"] = "CannotChangeToLowerTier"; + /** CannotVerifyCopySource */ + KnownStorageErrorCode["CannotVerifyCopySource"] = "CannotVerifyCopySource"; + /** ContainerAlreadyExists */ + KnownStorageErrorCode["ContainerAlreadyExists"] = "ContainerAlreadyExists"; + /** ContainerBeingDeleted */ + KnownStorageErrorCode["ContainerBeingDeleted"] = "ContainerBeingDeleted"; + /** ContainerDisabled */ + KnownStorageErrorCode["ContainerDisabled"] = "ContainerDisabled"; + /** ContainerNotFound */ + KnownStorageErrorCode["ContainerNotFound"] = "ContainerNotFound"; + /** ContentLengthLargerThanTierLimit */ + KnownStorageErrorCode["ContentLengthLargerThanTierLimit"] = "ContentLengthLargerThanTierLimit"; + /** CopyAcrossAccountsNotSupported */ + KnownStorageErrorCode["CopyAcrossAccountsNotSupported"] = "CopyAcrossAccountsNotSupported"; + /** CopyIdMismatch */ + KnownStorageErrorCode["CopyIdMismatch"] = "CopyIdMismatch"; + /** FeatureVersionMismatch */ + KnownStorageErrorCode["FeatureVersionMismatch"] = "FeatureVersionMismatch"; + /** IncrementalCopyBlobMismatch */ + KnownStorageErrorCode["IncrementalCopyBlobMismatch"] = "IncrementalCopyBlobMismatch"; + /** IncrementalCopyOfEarlierVersionSnapshotNotAllowed */ + KnownStorageErrorCode["IncrementalCopyOfEarlierVersionSnapshotNotAllowed"] = "IncrementalCopyOfEarlierVersionSnapshotNotAllowed"; + /** IncrementalCopySourceMustBeSnapshot */ + KnownStorageErrorCode["IncrementalCopySourceMustBeSnapshot"] = "IncrementalCopySourceMustBeSnapshot"; + /** InfiniteLeaseDurationRequired */ + KnownStorageErrorCode["InfiniteLeaseDurationRequired"] = "InfiniteLeaseDurationRequired"; + /** InvalidBlobOrBlock */ + 
KnownStorageErrorCode["InvalidBlobOrBlock"] = "InvalidBlobOrBlock"; + /** InvalidBlobTier */ + KnownStorageErrorCode["InvalidBlobTier"] = "InvalidBlobTier"; + /** InvalidBlobType */ + KnownStorageErrorCode["InvalidBlobType"] = "InvalidBlobType"; + /** InvalidBlockId */ + KnownStorageErrorCode["InvalidBlockId"] = "InvalidBlockId"; + /** InvalidBlockList */ + KnownStorageErrorCode["InvalidBlockList"] = "InvalidBlockList"; + /** InvalidOperation */ + KnownStorageErrorCode["InvalidOperation"] = "InvalidOperation"; + /** InvalidPageRange */ + KnownStorageErrorCode["InvalidPageRange"] = "InvalidPageRange"; + /** InvalidSourceBlobType */ + KnownStorageErrorCode["InvalidSourceBlobType"] = "InvalidSourceBlobType"; + /** InvalidSourceBlobUrl */ + KnownStorageErrorCode["InvalidSourceBlobUrl"] = "InvalidSourceBlobUrl"; + /** InvalidVersionForPageBlobOperation */ + KnownStorageErrorCode["InvalidVersionForPageBlobOperation"] = "InvalidVersionForPageBlobOperation"; + /** LeaseAlreadyPresent */ + KnownStorageErrorCode["LeaseAlreadyPresent"] = "LeaseAlreadyPresent"; + /** LeaseAlreadyBroken */ + KnownStorageErrorCode["LeaseAlreadyBroken"] = "LeaseAlreadyBroken"; + /** LeaseIdMismatchWithBlobOperation */ + KnownStorageErrorCode["LeaseIdMismatchWithBlobOperation"] = "LeaseIdMismatchWithBlobOperation"; + /** LeaseIdMismatchWithContainerOperation */ + KnownStorageErrorCode["LeaseIdMismatchWithContainerOperation"] = "LeaseIdMismatchWithContainerOperation"; + /** LeaseIdMismatchWithLeaseOperation */ + KnownStorageErrorCode["LeaseIdMismatchWithLeaseOperation"] = "LeaseIdMismatchWithLeaseOperation"; + /** LeaseIdMissing */ + KnownStorageErrorCode["LeaseIdMissing"] = "LeaseIdMissing"; + /** LeaseIsBreakingAndCannotBeAcquired */ + KnownStorageErrorCode["LeaseIsBreakingAndCannotBeAcquired"] = "LeaseIsBreakingAndCannotBeAcquired"; + /** LeaseIsBreakingAndCannotBeChanged */ + KnownStorageErrorCode["LeaseIsBreakingAndCannotBeChanged"] = "LeaseIsBreakingAndCannotBeChanged"; + /** 
LeaseIsBrokenAndCannotBeRenewed */ + KnownStorageErrorCode["LeaseIsBrokenAndCannotBeRenewed"] = "LeaseIsBrokenAndCannotBeRenewed"; + /** LeaseLost */ + KnownStorageErrorCode["LeaseLost"] = "LeaseLost"; + /** LeaseNotPresentWithBlobOperation */ + KnownStorageErrorCode["LeaseNotPresentWithBlobOperation"] = "LeaseNotPresentWithBlobOperation"; + /** LeaseNotPresentWithContainerOperation */ + KnownStorageErrorCode["LeaseNotPresentWithContainerOperation"] = "LeaseNotPresentWithContainerOperation"; + /** LeaseNotPresentWithLeaseOperation */ + KnownStorageErrorCode["LeaseNotPresentWithLeaseOperation"] = "LeaseNotPresentWithLeaseOperation"; + /** MaxBlobSizeConditionNotMet */ + KnownStorageErrorCode["MaxBlobSizeConditionNotMet"] = "MaxBlobSizeConditionNotMet"; + /** NoAuthenticationInformation */ + KnownStorageErrorCode["NoAuthenticationInformation"] = "NoAuthenticationInformation"; + /** NoPendingCopyOperation */ + KnownStorageErrorCode["NoPendingCopyOperation"] = "NoPendingCopyOperation"; + /** OperationNotAllowedOnIncrementalCopyBlob */ + KnownStorageErrorCode["OperationNotAllowedOnIncrementalCopyBlob"] = "OperationNotAllowedOnIncrementalCopyBlob"; + /** PendingCopyOperation */ + KnownStorageErrorCode["PendingCopyOperation"] = "PendingCopyOperation"; + /** PreviousSnapshotCannotBeNewer */ + KnownStorageErrorCode["PreviousSnapshotCannotBeNewer"] = "PreviousSnapshotCannotBeNewer"; + /** PreviousSnapshotNotFound */ + KnownStorageErrorCode["PreviousSnapshotNotFound"] = "PreviousSnapshotNotFound"; + /** PreviousSnapshotOperationNotSupported */ + KnownStorageErrorCode["PreviousSnapshotOperationNotSupported"] = "PreviousSnapshotOperationNotSupported"; + /** SequenceNumberConditionNotMet */ + KnownStorageErrorCode["SequenceNumberConditionNotMet"] = "SequenceNumberConditionNotMet"; + /** SequenceNumberIncrementTooLarge */ + KnownStorageErrorCode["SequenceNumberIncrementTooLarge"] = "SequenceNumberIncrementTooLarge"; + /** SnapshotCountExceeded */ + 
KnownStorageErrorCode["SnapshotCountExceeded"] = "SnapshotCountExceeded"; + /** SnapshotOperationRateExceeded */ + KnownStorageErrorCode["SnapshotOperationRateExceeded"] = "SnapshotOperationRateExceeded"; + /** SnapshotsPresent */ + KnownStorageErrorCode["SnapshotsPresent"] = "SnapshotsPresent"; + /** SourceConditionNotMet */ + KnownStorageErrorCode["SourceConditionNotMet"] = "SourceConditionNotMet"; + /** SystemInUse */ + KnownStorageErrorCode["SystemInUse"] = "SystemInUse"; + /** TargetConditionNotMet */ + KnownStorageErrorCode["TargetConditionNotMet"] = "TargetConditionNotMet"; + /** UnauthorizedBlobOverwrite */ + KnownStorageErrorCode["UnauthorizedBlobOverwrite"] = "UnauthorizedBlobOverwrite"; + /** BlobBeingRehydrated */ + KnownStorageErrorCode["BlobBeingRehydrated"] = "BlobBeingRehydrated"; + /** BlobArchived */ + KnownStorageErrorCode["BlobArchived"] = "BlobArchived"; + /** BlobNotArchived */ + KnownStorageErrorCode["BlobNotArchived"] = "BlobNotArchived"; + /** AuthorizationSourceIPMismatch */ + KnownStorageErrorCode["AuthorizationSourceIPMismatch"] = "AuthorizationSourceIPMismatch"; + /** AuthorizationProtocolMismatch */ + KnownStorageErrorCode["AuthorizationProtocolMismatch"] = "AuthorizationProtocolMismatch"; + /** AuthorizationPermissionMismatch */ + KnownStorageErrorCode["AuthorizationPermissionMismatch"] = "AuthorizationPermissionMismatch"; + /** AuthorizationServiceMismatch */ + KnownStorageErrorCode["AuthorizationServiceMismatch"] = "AuthorizationServiceMismatch"; + /** AuthorizationResourceTypeMismatch */ + KnownStorageErrorCode["AuthorizationResourceTypeMismatch"] = "AuthorizationResourceTypeMismatch"; +})(KnownStorageErrorCode || (KnownStorageErrorCode = {})); //# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js index e5d4ba248..355369e61 100644 
--- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/mappers.js @@ -17,24 +17,24 @@ export const BlobServiceProperties = { xmlName: "Logging", type: { name: "Composite", - className: "Logging" - } + className: "Logging", + }, }, hourMetrics: { serializedName: "HourMetrics", xmlName: "HourMetrics", type: { name: "Composite", - className: "Metrics" - } + className: "Metrics", + }, }, minuteMetrics: { serializedName: "MinuteMetrics", xmlName: "MinuteMetrics", type: { name: "Composite", - className: "Metrics" - } + className: "Metrics", + }, }, cors: { serializedName: "Cors", @@ -46,36 +46,36 @@ export const BlobServiceProperties = { element: { type: { name: "Composite", - className: "CorsRule" - } - } - } + className: "CorsRule", + }, + }, + }, }, defaultServiceVersion: { serializedName: "DefaultServiceVersion", xmlName: "DefaultServiceVersion", type: { - name: "String" - } + name: "String", + }, }, deleteRetentionPolicy: { serializedName: "DeleteRetentionPolicy", xmlName: "DeleteRetentionPolicy", type: { name: "Composite", - className: "RetentionPolicy" - } + className: "RetentionPolicy", + }, }, staticWebsite: { serializedName: "StaticWebsite", xmlName: "StaticWebsite", type: { name: "Composite", - className: "StaticWebsite" - } - } - } - } + className: "StaticWebsite", + }, + }, + }, + }, }; export const Logging = { serializedName: "Logging", @@ -88,43 +88,43 @@ export const Logging = { required: true, xmlName: "Version", type: { - name: "String" - } + name: "String", + }, }, deleteProperty: { serializedName: "Delete", required: true, xmlName: "Delete", type: { - name: "Boolean" - } + name: "Boolean", + }, }, read: { serializedName: "Read", required: true, xmlName: "Read", type: { - name: "Boolean" - } + name: "Boolean", + }, }, write: { serializedName: "Write", required: true, xmlName: "Write", type: { - name: "Boolean" - } + name: 
"Boolean", + }, }, retentionPolicy: { serializedName: "RetentionPolicy", xmlName: "RetentionPolicy", type: { name: "Composite", - className: "RetentionPolicy" - } - } - } - } + className: "RetentionPolicy", + }, + }, + }, + }, }; export const RetentionPolicy = { serializedName: "RetentionPolicy", @@ -137,21 +137,21 @@ export const RetentionPolicy = { required: true, xmlName: "Enabled", type: { - name: "Boolean" - } + name: "Boolean", + }, }, days: { constraints: { - InclusiveMinimum: 1 + InclusiveMinimum: 1, }, serializedName: "Days", xmlName: "Days", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const Metrics = { serializedName: "Metrics", @@ -163,34 +163,34 @@ export const Metrics = { serializedName: "Version", xmlName: "Version", type: { - name: "String" - } + name: "String", + }, }, enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { - name: "Boolean" - } + name: "Boolean", + }, }, includeAPIs: { serializedName: "IncludeAPIs", xmlName: "IncludeAPIs", type: { - name: "Boolean" - } + name: "Boolean", + }, }, retentionPolicy: { serializedName: "RetentionPolicy", xmlName: "RetentionPolicy", type: { name: "Composite", - className: "RetentionPolicy" - } - } - } - } + className: "RetentionPolicy", + }, + }, + }, + }, }; export const CorsRule = { serializedName: "CorsRule", @@ -203,46 +203,46 @@ export const CorsRule = { required: true, xmlName: "AllowedOrigins", type: { - name: "String" - } + name: "String", + }, }, allowedMethods: { serializedName: "AllowedMethods", required: true, xmlName: "AllowedMethods", type: { - name: "String" - } + name: "String", + }, }, allowedHeaders: { serializedName: "AllowedHeaders", required: true, xmlName: "AllowedHeaders", type: { - name: "String" - } + name: "String", + }, }, exposedHeaders: { serializedName: "ExposedHeaders", required: true, xmlName: "ExposedHeaders", type: { - name: "String" - } + name: "String", + }, }, maxAgeInSeconds: { constraints: { - 
InclusiveMinimum: 0 + InclusiveMinimum: 0, }, serializedName: "MaxAgeInSeconds", required: true, xmlName: "MaxAgeInSeconds", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const StaticWebsite = { serializedName: "StaticWebsite", @@ -255,32 +255,32 @@ export const StaticWebsite = { required: true, xmlName: "Enabled", type: { - name: "Boolean" - } + name: "Boolean", + }, }, indexDocument: { serializedName: "IndexDocument", xmlName: "IndexDocument", type: { - name: "String" - } + name: "String", + }, }, errorDocument404Path: { serializedName: "ErrorDocument404Path", xmlName: "ErrorDocument404Path", type: { - name: "String" - } + name: "String", + }, }, defaultIndexDocumentPath: { serializedName: "DefaultIndexDocumentPath", xmlName: "DefaultIndexDocumentPath", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const StorageError = { serializedName: "StorageError", @@ -292,18 +292,18 @@ export const StorageError = { serializedName: "Message", xmlName: "Message", type: { - name: "String" - } + name: "String", + }, }, code: { serializedName: "Code", xmlName: "Code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobServiceStatistics = { serializedName: "BlobServiceStatistics", @@ -317,11 +317,11 @@ export const BlobServiceStatistics = { xmlName: "GeoReplication", type: { name: "Composite", - className: "GeoReplication" - } - } - } - } + className: "GeoReplication", + }, + }, + }, + }, }; export const GeoReplication = { serializedName: "GeoReplication", @@ -335,19 +335,19 @@ export const GeoReplication = { xmlName: "Status", type: { name: "Enum", - allowedValues: ["live", "bootstrap", "unavailable"] - } + allowedValues: ["live", "bootstrap", "unavailable"], + }, }, lastSyncOn: { serializedName: "LastSyncTime", required: true, xmlName: "LastSyncTime", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, 
}; export const ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", @@ -362,29 +362,29 @@ export const ListContainersSegmentResponse = { xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { - name: "String" - } + name: "String", + }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { - name: "String" - } + name: "String", + }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { - name: "Number" - } + name: "Number", + }, }, containerItems: { serializedName: "ContainerItems", @@ -397,20 +397,20 @@ export const ListContainersSegmentResponse = { element: { type: { name: "Composite", - className: "ContainerItem" - } - } - } + className: "ContainerItem", + }, + }, + }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerItem = { serializedName: "ContainerItem", @@ -424,41 +424,41 @@ export const ContainerItem = { required: true, xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, deleted: { serializedName: "Deleted", xmlName: "Deleted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, version: { serializedName: "Version", xmlName: "Version", type: { - name: "String" - } + name: "String", + }, }, properties: { serializedName: "Properties", xmlName: "Properties", type: { name: "Composite", - className: "ContainerProperties" - } + className: "ContainerProperties", + }, }, metadata: { serializedName: "Metadata", xmlName: "Metadata", type: { name: "Dictionary", - value: { type: { name: "String" } } - } - } - } - } + value: { type: { name: "String" } }, + }, + }, + }, + }, }; export const ContainerProperties = { serializedName: "ContainerProperties", @@ -471,24 +471,24 @@ export const ContainerProperties = { required: true, xmlName: 
"Last-Modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "Etag", required: true, xmlName: "Etag", type: { - name: "String" - } + name: "String", + }, }, leaseStatus: { serializedName: "LeaseStatus", xmlName: "LeaseStatus", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, leaseState: { serializedName: "LeaseState", @@ -500,77 +500,77 @@ export const ContainerProperties = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, publicAccess: { serializedName: "PublicAccess", xmlName: "PublicAccess", type: { name: "Enum", - allowedValues: ["container", "blob"] - } + allowedValues: ["container", "blob"], + }, }, hasImmutabilityPolicy: { serializedName: "HasImmutabilityPolicy", xmlName: "HasImmutabilityPolicy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, hasLegalHold: { serializedName: "HasLegalHold", xmlName: "HasLegalHold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, defaultEncryptionScope: { serializedName: "DefaultEncryptionScope", xmlName: "DefaultEncryptionScope", type: { - name: "String" - } + name: "String", + }, }, preventEncryptionScopeOverride: { serializedName: "DenyEncryptionScopeOverride", xmlName: "DenyEncryptionScopeOverride", type: { - name: "Boolean" - } + name: "Boolean", + }, }, deletedOn: { serializedName: "DeletedTime", xmlName: "DeletedTime", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, remainingRetentionDays: { serializedName: "RemainingRetentionDays", xmlName: "RemainingRetentionDays", type: { - name: "Number" - } + name: "Number", + }, }, isImmutableStorageWithVersioningEnabled: { serializedName: "ImmutableStorageWithVersioningEnabled", xmlName: 
"ImmutableStorageWithVersioningEnabled", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const KeyInfo = { serializedName: "KeyInfo", @@ -583,19 +583,19 @@ export const KeyInfo = { required: true, xmlName: "Start", type: { - name: "String" - } + name: "String", + }, }, expiresOn: { serializedName: "Expiry", required: true, xmlName: "Expiry", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const UserDelegationKey = { serializedName: "UserDelegationKey", @@ -608,59 +608,59 @@ export const UserDelegationKey = { required: true, xmlName: "SignedOid", type: { - name: "String" - } + name: "String", + }, }, signedTenantId: { serializedName: "SignedTid", required: true, xmlName: "SignedTid", type: { - name: "String" - } + name: "String", + }, }, signedStartsOn: { serializedName: "SignedStart", required: true, xmlName: "SignedStart", type: { - name: "String" - } + name: "String", + }, }, signedExpiresOn: { serializedName: "SignedExpiry", required: true, xmlName: "SignedExpiry", type: { - name: "String" - } + name: "String", + }, }, signedService: { serializedName: "SignedService", required: true, xmlName: "SignedService", type: { - name: "String" - } + name: "String", + }, }, signedVersion: { serializedName: "SignedVersion", required: true, xmlName: "SignedVersion", type: { - name: "String" - } + name: "String", + }, }, value: { serializedName: "Value", required: true, xmlName: "Value", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const FilterBlobSegment = { serializedName: "FilterBlobSegment", @@ -675,16 +675,16 @@ export const FilterBlobSegment = { xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, where: { serializedName: "Where", required: true, xmlName: "Where", type: { - name: "String" - } + name: "String", + }, }, blobs: { serializedName: "Blobs", @@ -697,20 +697,20 @@ export const 
FilterBlobSegment = { element: { type: { name: "Composite", - className: "FilterBlobItem" - } - } - } + className: "FilterBlobItem", + }, + }, + }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const FilterBlobItem = { serializedName: "FilterBlobItem", @@ -724,27 +724,27 @@ export const FilterBlobItem = { required: true, xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", type: { - name: "String" - } + name: "String", + }, }, tags: { serializedName: "Tags", xmlName: "Tags", type: { name: "Composite", - className: "BlobTags" - } - } - } - } + className: "BlobTags", + }, + }, + }, + }, }; export const BlobTags = { serializedName: "BlobTags", @@ -764,13 +764,13 @@ export const BlobTags = { element: { type: { name: "Composite", - className: "BlobTag" - } - } - } - } - } - } + className: "BlobTag", + }, + }, + }, + }, + }, + }, }; export const BlobTag = { serializedName: "BlobTag", @@ -784,19 +784,19 @@ export const BlobTag = { required: true, xmlName: "Key", type: { - name: "String" - } + name: "String", + }, }, value: { serializedName: "Value", required: true, xmlName: "Value", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const SignedIdentifier = { serializedName: "SignedIdentifier", @@ -810,19 +810,19 @@ export const SignedIdentifier = { required: true, xmlName: "Id", type: { - name: "String" - } + name: "String", + }, }, accessPolicy: { serializedName: "AccessPolicy", xmlName: "AccessPolicy", type: { name: "Composite", - className: "AccessPolicy" - } - } - } - } + className: "AccessPolicy", + }, + }, + }, + }, }; export const AccessPolicy = { serializedName: "AccessPolicy", @@ -834,25 +834,25 @@ export const AccessPolicy = { serializedName: "Start", xmlName: "Start", type: { - name: "String" - } 
+ name: "String", + }, }, expiresOn: { serializedName: "Expiry", xmlName: "Expiry", type: { - name: "String" - } + name: "String", + }, }, permissions: { serializedName: "Permission", xmlName: "Permission", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", @@ -867,8 +867,8 @@ export const ListBlobsFlatSegmentResponse = { xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, containerName: { serializedName: "ContainerName", @@ -876,47 +876,47 @@ export const ListBlobsFlatSegmentResponse = { xmlName: "ContainerName", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { - name: "String" - } + name: "String", + }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { - name: "String" - } + name: "String", + }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { - name: "Number" - } + name: "Number", + }, }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", - className: "BlobFlatListSegment" - } + className: "BlobFlatListSegment", + }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobFlatListSegment = { serializedName: "BlobFlatListSegment", @@ -935,13 +935,13 @@ export const BlobFlatListSegment = { element: { type: { name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, }; export const BlobItemInternal = { serializedName: "BlobItemInternal", @@ -955,80 +955,80 @@ export const BlobItemInternal = { xmlName: "Name", type: { name: "Composite", - className: "BlobName" - } + className: "BlobName", + }, }, deleted: { serializedName: 
"Deleted", required: true, xmlName: "Deleted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, snapshot: { serializedName: "Snapshot", required: true, xmlName: "Snapshot", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "VersionId", xmlName: "VersionId", type: { - name: "String" - } + name: "String", + }, }, isCurrentVersion: { serializedName: "IsCurrentVersion", xmlName: "IsCurrentVersion", type: { - name: "Boolean" - } + name: "Boolean", + }, }, properties: { serializedName: "Properties", xmlName: "Properties", type: { name: "Composite", - className: "BlobPropertiesInternal" - } + className: "BlobPropertiesInternal", + }, }, metadata: { serializedName: "Metadata", xmlName: "Metadata", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, blobTags: { serializedName: "BlobTags", xmlName: "Tags", type: { name: "Composite", - className: "BlobTags" - } + className: "BlobTags", + }, }, objectReplicationMetadata: { serializedName: "ObjectReplicationMetadata", xmlName: "OrMetadata", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, hasVersionsOnly: { serializedName: "HasVersionsOnly", xmlName: "HasVersionsOnly", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const BlobName = { serializedName: "BlobName", @@ -1041,19 +1041,19 @@ export const BlobName = { xmlName: "Encoded", xmlIsAttribute: true, type: { - name: "Boolean" - } + name: "Boolean", + }, }, content: { serializedName: "content", xmlName: "content", xmlIsMsText: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", @@ -1066,96 +1066,96 @@ export const BlobPropertiesInternal = { serializedName: "Creation-Time", xmlName: "Creation-Time", type: { - name: "DateTimeRfc1123" - } + name: 
"DateTimeRfc1123", + }, }, lastModified: { serializedName: "Last-Modified", required: true, xmlName: "Last-Modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "Etag", required: true, xmlName: "Etag", type: { - name: "String" - } + name: "String", + }, }, contentLength: { serializedName: "Content-Length", xmlName: "Content-Length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "Content-Type", xmlName: "Content-Type", type: { - name: "String" - } + name: "String", + }, }, contentEncoding: { serializedName: "Content-Encoding", xmlName: "Content-Encoding", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "Content-Language", xmlName: "Content-Language", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentDisposition: { serializedName: "Content-Disposition", xmlName: "Content-Disposition", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "Cache-Control", xmlName: "Cache-Control", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { serializedName: "BlobType", xmlName: "BlobType", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, leaseStatus: { serializedName: "LeaseStatus", xmlName: "LeaseStatus", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, leaseState: { serializedName: "LeaseState", @@ -1167,95 +1167,95 @@ export const BlobPropertiesInternal = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, 
leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, copyId: { serializedName: "CopyId", xmlName: "CopyId", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "CopyStatus", xmlName: "CopyStatus", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, copySource: { serializedName: "CopySource", xmlName: "CopySource", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "CopyProgress", xmlName: "CopyProgress", type: { - name: "String" - } + name: "String", + }, }, copyCompletedOn: { serializedName: "CopyCompletionTime", xmlName: "CopyCompletionTime", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "CopyStatusDescription", xmlName: "CopyStatusDescription", type: { - name: "String" - } + name: "String", + }, }, serverEncrypted: { serializedName: "ServerEncrypted", xmlName: "ServerEncrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, incrementalCopy: { serializedName: "IncrementalCopy", xmlName: "IncrementalCopy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, destinationSnapshot: { serializedName: "DestinationSnapshot", xmlName: "DestinationSnapshot", type: { - name: "String" - } + name: "String", + }, }, deletedOn: { serializedName: "DeletedTime", xmlName: "DeletedTime", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, remainingRetentionDays: { serializedName: "RemainingRetentionDays", xmlName: "RemainingRetentionDays", type: { - name: "Number" - } + name: "Number", + }, }, accessTier: { serializedName: "AccessTier", @@ -1277,16 +1277,16 @@ export const BlobPropertiesInternal = { "Hot", "Cool", "Archive", - "Cold" - ] - } + "Cold", + ], + }, }, 
accessTierInferred: { serializedName: "AccessTierInferred", xmlName: "AccessTierInferred", type: { - name: "Boolean" - } + name: "Boolean", + }, }, archiveStatus: { serializedName: "ArchiveStatus", @@ -1296,91 +1296,91 @@ export const BlobPropertiesInternal = { allowedValues: [ "rehydrate-pending-to-hot", "rehydrate-pending-to-cool", - "rehydrate-pending-to-cold" - ] - } + "rehydrate-pending-to-cold", + ], + }, }, customerProvidedKeySha256: { serializedName: "CustomerProvidedKeySha256", xmlName: "CustomerProvidedKeySha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "EncryptionScope", xmlName: "EncryptionScope", type: { - name: "String" - } + name: "String", + }, }, accessTierChangedOn: { serializedName: "AccessTierChangeTime", xmlName: "AccessTierChangeTime", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, tagCount: { serializedName: "TagCount", xmlName: "TagCount", type: { - name: "Number" - } + name: "Number", + }, }, expiresOn: { serializedName: "Expiry-Time", xmlName: "Expiry-Time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isSealed: { serializedName: "Sealed", xmlName: "Sealed", type: { - name: "Boolean" - } + name: "Boolean", + }, }, rehydratePriority: { serializedName: "RehydratePriority", xmlName: "RehydratePriority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } + allowedValues: ["High", "Standard"], + }, }, lastAccessedOn: { serializedName: "LastAccessTime", xmlName: "LastAccessTime", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: "ImmutabilityPolicyUntilDate", xmlName: "ImmutabilityPolicyUntilDate", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "ImmutabilityPolicyMode", xmlName: "ImmutabilityPolicyMode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + 
allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "LegalHold", xmlName: "LegalHold", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", @@ -1395,8 +1395,8 @@ export const ListBlobsHierarchySegmentResponse = { xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, containerName: { serializedName: "ContainerName", @@ -1404,54 +1404,54 @@ export const ListBlobsHierarchySegmentResponse = { xmlName: "ContainerName", xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { - name: "String" - } + name: "String", + }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { - name: "String" - } + name: "String", + }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { - name: "Number" - } + name: "Number", + }, }, delimiter: { serializedName: "Delimiter", xmlName: "Delimiter", type: { - name: "String" - } + name: "String", + }, }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", - className: "BlobHierarchyListSegment" - } + className: "BlobHierarchyListSegment", + }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", @@ -1469,10 +1469,10 @@ export const BlobHierarchyListSegment = { element: { type: { name: "Composite", - className: "BlobPrefix" - } - } - } + className: "BlobPrefix", + }, + }, + }, }, blobItems: { serializedName: "BlobItems", @@ -1484,13 +1484,13 @@ export const BlobHierarchyListSegment = { element: { type: { name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } + className: 
"BlobItemInternal", + }, + }, + }, + }, + }, + }, }; export const BlobPrefix = { serializedName: "BlobPrefix", @@ -1503,11 +1503,11 @@ export const BlobPrefix = { xmlName: "Name", type: { name: "Composite", - className: "BlobName" - } - } - } - } + className: "BlobName", + }, + }, + }, + }, }; export const BlockLookupList = { serializedName: "BlockLookupList", @@ -1524,10 +1524,10 @@ export const BlockLookupList = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, uncommitted: { serializedName: "Uncommitted", @@ -1537,10 +1537,10 @@ export const BlockLookupList = { name: "Sequence", element: { type: { - name: "String" - } - } - } + name: "String", + }, + }, + }, }, latest: { serializedName: "Latest", @@ -1550,13 +1550,13 @@ export const BlockLookupList = { name: "Sequence", element: { type: { - name: "String" - } - } - } - } - } - } + name: "String", + }, + }, + }, + }, + }, + }, }; export const BlockList = { serializedName: "BlockList", @@ -1574,10 +1574,10 @@ export const BlockList = { element: { type: { name: "Composite", - className: "Block" - } - } - } + className: "Block", + }, + }, + }, }, uncommittedBlocks: { serializedName: "UncommittedBlocks", @@ -1589,13 +1589,13 @@ export const BlockList = { element: { type: { name: "Composite", - className: "Block" - } - } - } - } - } - } + className: "Block", + }, + }, + }, + }, + }, + }, }; export const Block = { serializedName: "Block", @@ -1608,19 +1608,19 @@ export const Block = { required: true, xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, size: { serializedName: "Size", required: true, xmlName: "Size", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const PageList = { serializedName: "PageList", @@ -1637,10 +1637,10 @@ export const PageList = { element: { type: { name: "Composite", - className: "PageRange" - } - } - } + className: "PageRange", + }, + }, + }, }, clearRange: { serializedName: 
"ClearRange", @@ -1651,20 +1651,20 @@ export const PageList = { element: { type: { name: "Composite", - className: "ClearRange" - } - } - } + className: "ClearRange", + }, + }, + }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageRange = { serializedName: "PageRange", @@ -1678,19 +1678,19 @@ export const PageRange = { required: true, xmlName: "Start", type: { - name: "Number" - } + name: "Number", + }, }, end: { serializedName: "End", required: true, xmlName: "End", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const ClearRange = { serializedName: "ClearRange", @@ -1704,19 +1704,19 @@ export const ClearRange = { required: true, xmlName: "Start", type: { - name: "Number" - } + name: "Number", + }, }, end: { serializedName: "End", required: true, xmlName: "End", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const QueryRequest = { serializedName: "QueryRequest", @@ -1730,35 +1730,35 @@ export const QueryRequest = { required: true, xmlName: "QueryType", type: { - name: "String" - } + name: "String", + }, }, expression: { serializedName: "Expression", required: true, xmlName: "Expression", type: { - name: "String" - } + name: "String", + }, }, inputSerialization: { serializedName: "InputSerialization", xmlName: "InputSerialization", type: { name: "Composite", - className: "QuerySerialization" - } + className: "QuerySerialization", + }, }, outputSerialization: { serializedName: "OutputSerialization", xmlName: "OutputSerialization", type: { name: "Composite", - className: "QuerySerialization" - } - } - } - } + className: "QuerySerialization", + }, + }, + }, + }, }; export const QuerySerialization = { serializedName: "QuerySerialization", @@ -1771,11 +1771,11 @@ export const QuerySerialization = { xmlName: "Format", type: { name: "Composite", - className: 
"QueryFormat" - } - } - } - } + className: "QueryFormat", + }, + }, + }, + }, }; export const QueryFormat = { serializedName: "QueryFormat", @@ -1789,42 +1789,43 @@ export const QueryFormat = { xmlName: "Type", type: { name: "Enum", - allowedValues: ["delimited", "json", "arrow", "parquet"] - } + allowedValues: ["delimited", "json", "arrow", "parquet"], + }, }, delimitedTextConfiguration: { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { name: "Composite", - className: "DelimitedTextConfiguration" - } + className: "DelimitedTextConfiguration", + }, }, jsonTextConfiguration: { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { name: "Composite", - className: "JsonTextConfiguration" - } + className: "JsonTextConfiguration", + }, }, arrowConfiguration: { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { name: "Composite", - className: "ArrowConfiguration" - } + className: "ArrowConfiguration", + }, }, parquetTextConfiguration: { serializedName: "ParquetTextConfiguration", xmlName: "ParquetTextConfiguration", type: { - name: "any" - } - } - } - } + name: "Dictionary", + value: { type: { name: "any" } }, + }, + }, + }, + }, }; export const DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", @@ -1837,39 +1838,39 @@ export const DelimitedTextConfiguration = { serializedName: "ColumnSeparator", xmlName: "ColumnSeparator", type: { - name: "String" - } + name: "String", + }, }, fieldQuote: { serializedName: "FieldQuote", xmlName: "FieldQuote", type: { - name: "String" - } + name: "String", + }, }, recordSeparator: { serializedName: "RecordSeparator", xmlName: "RecordSeparator", type: { - name: "String" - } + name: "String", + }, }, escapeChar: { serializedName: "EscapeChar", xmlName: "EscapeChar", type: { - name: "String" - } + name: "String", + }, }, headersPresent: { serializedName: "HeadersPresent", xmlName: "HasHeaders", type: { - name: 
"Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const JsonTextConfiguration = { serializedName: "JsonTextConfiguration", @@ -1882,11 +1883,11 @@ export const JsonTextConfiguration = { serializedName: "RecordSeparator", xmlName: "RecordSeparator", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ArrowConfiguration = { serializedName: "ArrowConfiguration", @@ -1906,13 +1907,13 @@ export const ArrowConfiguration = { element: { type: { name: "Composite", - className: "ArrowField" - } - } - } - } - } - } + className: "ArrowField", + }, + }, + }, + }, + }, + }, }; export const ArrowField = { serializedName: "ArrowField", @@ -1926,32 +1927,32 @@ export const ArrowField = { required: true, xmlName: "Type", type: { - name: "String" - } + name: "String", + }, }, name: { serializedName: "Name", xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, precision: { serializedName: "Precision", xmlName: "Precision", type: { - name: "Number" - } + name: "Number", + }, }, scale: { serializedName: "Scale", xmlName: "Scale", type: { - name: "Number" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; export const ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", @@ -1963,32 +1964,32 @@ export const ServiceSetPropertiesHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceSetPropertiesExceptionHeaders = { serializedName: 
"Service_setPropertiesExceptionHeaders", @@ -2000,11 +2001,11 @@ export const ServiceSetPropertiesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", @@ -2016,32 +2017,32 @@ export const ServiceGetPropertiesHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", @@ -2053,11 +2054,11 @@ export const ServiceGetPropertiesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", @@ -2069,39 +2070,39 @@ export const ServiceGetStatisticsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { 
serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", @@ -2113,11 +2114,11 @@ export const ServiceGetStatisticsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", @@ -2129,32 +2130,32 @@ export const ServiceListContainersSegmentHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", @@ -2166,12 +2167,12 @@ export const ServiceListContainersSegmentExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } -}; + name: "String", + }, + }, + }, + }, +}; export const ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { @@ -2182,39 +2183,39 @@ export const ServiceGetUserDelegationKeyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } 
+ name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", @@ -2226,11 +2227,11 @@ export const ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", @@ -2242,29 +2243,29 @@ export const ServiceGetAccountInfoHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, skuName: { serializedName: "x-ms-sku-name", @@ -2276,9 +2277,9 @@ export const ServiceGetAccountInfoHeaders = { "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", - "Premium_LRS" - ] - } + "Premium_LRS", + ], + }, }, accountKind: { serializedName: "x-ms-account-kind", @@ -2290,26 +2291,26 @@ export const ServiceGetAccountInfoHeaders = { "BlobStorage", "StorageV2", "FileStorage", - "BlockBlobStorage" - ] - } + "BlockBlobStorage", + ], + }, }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: 
"x-ms-is-hns-enabled", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", @@ -2321,11 +2322,11 @@ export const ServiceGetAccountInfoExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", @@ -2337,39 +2338,39 @@ export const ServiceSubmitBatchHeaders = { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", @@ -2381,11 +2382,11 @@ export const ServiceSubmitBatchExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", @@ -2397,39 +2398,39 @@ export const ServiceFilterBlobsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + 
name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", @@ -2441,11 +2442,11 @@ export const ServiceFilterBlobsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerCreateHeaders = { serializedName: "Container_createHeaders", @@ -2457,53 +2458,53 @@ export const ContainerCreateHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const 
ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", @@ -2515,11 +2516,11 @@ export const ContainerCreateExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", @@ -2529,34 +2530,34 @@ export const ContainerGetPropertiesHeaders = { modelProperties: { metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -2568,98 +2569,98 @@ export const ContainerGetPropertiesHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, 
date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobPublicAccess: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", - allowedValues: ["container", "blob"] - } + allowedValues: ["container", "blob"], + }, }, hasImmutabilityPolicy: { serializedName: "x-ms-has-immutability-policy", xmlName: "x-ms-has-immutability-policy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, hasLegalHold: { serializedName: "x-ms-has-legal-hold", xmlName: "x-ms-has-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, defaultEncryptionScope: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, denyEncryptionScopeOverride: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { - name: "Boolean" - } + name: "Boolean", + }, }, isImmutableStorageWithVersioningEnabled: { serializedName: "x-ms-immutable-storage-with-versioning-enabled", xmlName: "x-ms-immutable-storage-with-versioning-enabled", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", @@ -2671,11 +2672,11 @@ export const ContainerGetPropertiesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", @@ -2687,39 +2688,39 @@ export const ContainerDeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: 
"String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", @@ -2731,11 +2732,11 @@ export const ContainerDeleteExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", @@ -2747,53 +2748,53 @@ export const ContainerSetMetadataHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const 
ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", @@ -2805,11 +2806,11 @@ export const ContainerSetMetadataExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", @@ -2822,60 +2823,60 @@ export const ContainerGetAccessPolicyHeaders = { xmlName: "x-ms-blob-public-access", type: { name: "Enum", - allowedValues: ["container", "blob"] - } + allowedValues: ["container", "blob"], + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", @@ -2887,11 +2888,11 @@ export const ContainerGetAccessPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerSetAccessPolicyHeaders = { serializedName: 
"Container_setAccessPolicyHeaders", @@ -2903,53 +2904,53 @@ export const ContainerSetAccessPolicyHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", @@ -2961,11 +2962,11 @@ export const ContainerSetAccessPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", @@ -2977,39 +2978,39 @@ export const ContainerRestoreHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", @@ -3021,11 +3022,11 @@ export const ContainerRestoreExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerRenameHeaders = { serializedName: "Container_renameHeaders", @@ -3037,39 +3038,39 @@ export const ContainerRenameHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", @@ -3081,11 +3082,11 @@ export const ContainerRenameExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", @@ -3097,25 +3098,25 @@ export const ContainerSubmitBatchHeaders = { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: 
"x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", @@ -3127,11 +3128,11 @@ export const ContainerSubmitBatchExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerFilterBlobsHeaders = { serializedName: "Container_filterBlobsHeaders", @@ -3143,32 +3144,32 @@ export const ContainerFilterBlobsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const ContainerFilterBlobsExceptionHeaders = { serializedName: "Container_filterBlobsExceptionHeaders", @@ -3180,11 +3181,11 @@ export const ContainerFilterBlobsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", @@ -3196,53 +3197,53 @@ export const ContainerAcquireLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, 
leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", @@ -3254,11 +3255,11 @@ export const ContainerAcquireLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", @@ -3270,46 +3271,46 @@ export const ContainerReleaseLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const 
ContainerReleaseLeaseExceptionHeaders = { serializedName: "Container_releaseLeaseExceptionHeaders", @@ -3321,11 +3322,11 @@ export const ContainerReleaseLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", @@ -3337,53 +3338,53 @@ export const ContainerRenewLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", @@ -3395,11 +3396,11 @@ export const ContainerRenewLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", @@ -3411,53 +3412,53 @@ export const ContainerBreakLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, 
lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", @@ -3469,11 +3470,11 @@ export const ContainerBreakLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", @@ -3485,53 +3486,53 @@ export const ContainerChangeLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { 
serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const ContainerChangeLeaseExceptionHeaders = { serializedName: "Container_changeLeaseExceptionHeaders", @@ -3543,11 +3544,11 @@ export const ContainerChangeLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", @@ -3559,46 +3560,46 @@ export const ContainerListBlobFlatSegmentHeaders = { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", @@ -3610,11 +3611,11 @@ export const ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerListBlobHierarchySegmentHeaders = { 
serializedName: "Container_listBlobHierarchySegmentHeaders", @@ -3626,46 +3627,46 @@ export const ContainerListBlobHierarchySegmentHeaders = { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", @@ -3677,11 +3678,11 @@ export const ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const ContainerGetAccountInfoHeaders = { serializedName: "Container_getAccountInfoHeaders", @@ -3693,29 +3694,29 @@ export const ContainerGetAccountInfoHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, 
skuName: { serializedName: "x-ms-sku-name", @@ -3727,9 +3728,9 @@ export const ContainerGetAccountInfoHeaders = { "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", - "Premium_LRS" - ] - } + "Premium_LRS", + ], + }, }, accountKind: { serializedName: "x-ms-account-kind", @@ -3741,12 +3742,12 @@ export const ContainerGetAccountInfoHeaders = { "BlobStorage", "StorageV2", "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + "BlockBlobStorage", + ], + }, + }, + }, + }, }; export const ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", @@ -3758,11 +3759,11 @@ export const ContainerGetAccountInfoExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", @@ -3774,169 +3775,169 @@ export const BlobDownloadHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { - name: "String" - } + name: "String", + }, }, objectReplicationRules: { serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-or-" }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - 
name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", 
xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -3948,161 +3949,161 @@ export const BlobDownloadHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { - name: "Boolean" - } + name: "Boolean", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, 
blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { - name: "Number" - } + name: "Number", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } + name: "Boolean", + }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, contentCrc64: { serializedName: 
"x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } - } - } + name: "ByteArray", + }, + }, + }, + }, }; export const BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", @@ -4114,12 +4115,12 @@ export const BlobDownloadExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } -}; + name: "String", + }, + }, + }, + }, +}; export const BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { @@ -4130,113 +4131,113 @@ export const BlobGetPropertiesHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { - name: "String" - } + name: "String", + }, }, objectReplicationRules: { serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-or-" }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { 
serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, isIncrementalCopy: { serializedName: "x-ms-incremental-copy", xmlName: "x-ms-incremental-copy", type: { - name: "Boolean" - } + name: "Boolean", + }, }, destinationSnapshot: { serializedName: "x-ms-copy-destination-snapshot", xmlName: "x-ms-copy-destination-snapshot", type: { - name: "String" - } + name: "String", + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -4248,253 +4249,253 @@ export const BlobGetPropertiesHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: 
"etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: 
"x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, accessTier: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { - name: "String" - } + name: "String", + }, }, accessTierInferred: { serializedName: "x-ms-access-tier-inferred", xmlName: "x-ms-access-tier-inferred", type: { - name: "Boolean" - } + name: "Boolean", + }, }, archiveStatus: { serializedName: "x-ms-archive-status", xmlName: "x-ms-archive-status", type: { - name: "String" - } + name: "String", + }, }, accessTierChangedOn: { serializedName: "x-ms-access-tier-change-time", xmlName: "x-ms-access-tier-change-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { - name: "Boolean" - } + name: "Boolean", + }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { - name: "Number" - } + name: "Number", + }, }, expiresOn: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } + name: "Boolean", + }, }, rehydratePriority: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } + allowedValues: ["High", "Standard"], + }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiresOn: { serializedName: 
"x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", @@ -4506,11 +4507,11 @@ export const BlobGetPropertiesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", @@ -4522,39 +4523,39 @@ export const BlobDeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", @@ -4566,11 +4567,11 @@ export const BlobDeleteExceptionHeaders = { 
serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", @@ -4582,39 +4583,39 @@ export const BlobUndeleteHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", @@ -4626,11 +4627,11 @@ export const BlobUndeleteExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", @@ -4642,46 +4643,46 @@ export const BlobSetExpiryHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: 
{ - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", @@ -4693,11 +4694,11 @@ export const BlobSetExpiryExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", @@ -4709,60 +4710,60 @@ export const BlobSetHttpHeadersHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", @@ -4774,11 +4775,11 @@ export const BlobSetHttpHeadersExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: 
"x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetImmutabilityPolicyHeaders = { serializedName: "Blob_setImmutabilityPolicyHeaders", @@ -4790,47 +4791,47 @@ export const BlobSetImmutabilityPolicyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyExpiry: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } - } - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + }, + }, }; export const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", @@ -4842,11 +4843,11 @@ export const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "Blob_deleteImmutabilityPolicyHeaders", @@ -4858,32 +4859,32 @@ export const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: 
"x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", @@ -4895,11 +4896,11 @@ export const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetLegalHoldHeaders = { serializedName: "Blob_setLegalHoldHeaders", @@ -4911,39 +4912,39 @@ export const BlobSetLegalHoldHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const BlobSetLegalHoldExceptionHeaders = { serializedName: "Blob_setLegalHoldExceptionHeaders", @@ -4955,11 +4956,11 @@ export const BlobSetLegalHoldExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", @@ -4971,81 +4972,81 @@ export const BlobSetMetadataHeaders = { 
serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", @@ -5057,11 +5058,11 @@ export const BlobSetMetadataExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", @@ -5073,53 +5074,53 @@ export const 
BlobAcquireLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", @@ -5131,11 +5132,11 @@ export const BlobAcquireLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", @@ -5147,46 +5148,46 @@ export const BlobReleaseLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", 
xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", @@ -5198,11 +5199,11 @@ export const BlobReleaseLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", @@ -5214,53 +5215,53 @@ export const BlobRenewLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", @@ -5272,11 +5273,11 @@ export const BlobRenewLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobChangeLeaseHeaders = { serializedName: 
"Blob_changeLeaseHeaders", @@ -5288,53 +5289,53 @@ export const BlobChangeLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", @@ -5346,11 +5347,11 @@ export const BlobChangeLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", @@ -5362,53 +5363,53 @@ export const BlobBreakLeaseHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, 
}, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + }, + }, }; export const BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", @@ -5420,11 +5421,11 @@ export const BlobBreakLeaseExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", @@ -5436,74 +5437,74 @@ export const BlobCreateSnapshotHeaders = { serializedName: "x-ms-snapshot", xmlName: "x-ms-snapshot", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: 
"x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", @@ -5515,11 +5516,11 @@ export const BlobCreateSnapshotExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", @@ -5531,75 +5532,75 @@ export const BlobStartCopyFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, 
errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", @@ -5611,11 +5612,11 @@ export const BlobStartCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", @@ -5627,96 +5628,96 @@ export const BlobCopyFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { defaultValue: "success", isConstant: true, serializedName: "x-ms-copy-status", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", 
xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", @@ -5728,11 +5729,11 @@ export const BlobCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", @@ -5744,39 +5745,39 @@ export const BlobAbortCopyFromURLHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", @@ -5788,11 +5789,11 @@ export const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", @@ -5804,32 +5805,32 @@ export const 
BlobSetTierHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", @@ -5841,11 +5842,11 @@ export const BlobSetTierExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", @@ -5857,29 +5858,29 @@ export const BlobGetAccountInfoHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, skuName: { serializedName: "x-ms-sku-name", @@ -5891,9 +5892,9 @@ export const BlobGetAccountInfoHeaders = { "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", - "Premium_LRS" - ] - } + "Premium_LRS", + ], + }, }, accountKind: { serializedName: "x-ms-account-kind", @@ -5905,12 +5906,12 @@ export const BlobGetAccountInfoHeaders = { "BlobStorage", "StorageV2", "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + "BlockBlobStorage", + ], + }, + }, + }, + 
}, }; export const BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", @@ -5922,11 +5923,11 @@ export const BlobGetAccountInfoExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobQueryHeaders = { serializedName: "Blob_queryHeaders", @@ -5938,145 +5939,146 @@ export const BlobQueryHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, metadata: { serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", - value: { type: { name: "String" } } - } + value: { type: { name: "String" } }, + }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { - name: "String" - } + name: "String", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { - name: "String" - } + name: "String", + }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, 
blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, copyCompletionTime: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { - name: "String" - } + name: "String", + }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["infinite", "fixed"], + }, }, leaseState: { serializedName: "x-ms-lease-state", @@ -6088,104 +6090,104 @@ export const BlobQueryHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: 
"x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } - } - } + name: "ByteArray", + }, + }, + }, + }, }; export const BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", @@ -6197,11 +6199,11 @@ export const BlobQueryExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", 
+ }, + }, + }, + }, }; export const BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", @@ -6213,39 +6215,39 @@ export const BlobGetTagsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", @@ -6257,11 +6259,11 @@ export const BlobGetTagsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetTagsHeaders = { serializedName: "Blob_setTagsHeaders", @@ -6273,39 +6275,39 @@ export const BlobSetTagsHeaders = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlobSetTagsExceptionHeaders = { serializedName: 
"Blob_setTagsExceptionHeaders", @@ -6317,11 +6319,11 @@ export const BlobSetTagsExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", @@ -6333,88 +6335,88 @@ export const PageBlobCreateHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: 
"String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", @@ -6426,11 +6428,11 @@ export const PageBlobCreateExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", @@ -6442,95 +6444,95 @@ export const PageBlobUploadPagesHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: 
"x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", @@ -6542,11 +6544,11 @@ export const PageBlobUploadPagesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", @@ -6558,74 +6560,74 @@ export const PageBlobClearPagesHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", 
xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", @@ -6637,11 +6639,11 @@ export const PageBlobClearPagesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", @@ -6653,88 +6655,88 @@ export const PageBlobUploadPagesFromURLHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, 
encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", @@ -6746,11 +6748,11 @@ export const PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobGetPageRangesHeaders = { serializedName: "PageBlob_getPageRangesHeaders", @@ -6762,60 +6764,60 @@ export const PageBlobGetPageRangesHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: 
"String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", @@ -6827,11 +6829,11 @@ export const PageBlobGetPageRangesExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", @@ -6843,60 +6845,60 @@ export const PageBlobGetPageRangesDiffHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", @@ -6908,11 +6910,11 @@ export const PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, 
+ }, }; export const PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", @@ -6924,60 +6926,60 @@ export const PageBlobResizeHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", @@ -6989,11 +6991,11 @@ export const PageBlobResizeExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", @@ -7005,60 +7007,60 @@ export const PageBlobUpdateSequenceNumberHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, 
blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", @@ -7070,11 +7072,11 @@ export const PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", @@ -7086,68 +7088,68 @@ export const PageBlobCopyIncrementalHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + 
name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { - name: "String" - } + name: "String", + }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", @@ -7159,11 +7161,11 @@ export const PageBlobCopyIncrementalExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", @@ -7175,88 +7177,88 @@ export const AppendBlobCreateHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: 
"x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", @@ -7268,11 +7270,11 @@ export const AppendBlobCreateExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", @@ -7284,102 +7286,102 @@ export const AppendBlobAppendBlockHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, 
requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", @@ -7391,11 +7393,11 @@ export const AppendBlobAppendBlockExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", @@ -7407,95 +7409,95 @@ export const AppendBlobAppendBlockFromUrlHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: 
"last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { - name: "String" - } + name: "String", + }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { - name: "Number" - } + name: "Number", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", @@ -7507,11 +7509,11 @@ export const AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: 
"x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", @@ -7523,53 +7525,53 @@ export const AppendBlobSealHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { - name: "Boolean" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; export const AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", @@ -7581,11 +7583,11 @@ export const AppendBlobSealExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobUploadHeaders = { serializedName: "BlockBlob_uploadHeaders", @@ -7597,88 +7599,88 @@ export const BlockBlobUploadHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: 
"ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", @@ -7690,11 +7692,11 @@ export const BlockBlobUploadExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobPutBlobFromUrlHeaders = { serializedName: "BlockBlob_putBlobFromUrlHeaders", @@ -7706,88 +7708,88 @@ export const BlockBlobPutBlobFromUrlHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: 
"DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", @@ -7799,11 +7801,11 @@ export const BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", @@ -7815,74 +7817,74 @@ export const BlockBlobStageBlockHeaders = { 
serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", @@ -7894,11 +7896,11 @@ export const BlockBlobStageBlockExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", @@ -7910,74 +7912,74 @@ export const BlockBlobStageBlockFromURLHeaders = { serializedName: "content-md5", xmlName: "content-md5", 
type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", @@ -7989,11 +7991,11 @@ export const BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobCommitBlockListHeaders = { serializedName: "BlockBlob_commitBlockListHeaders", @@ -8005,95 +8007,95 @@ export const BlockBlobCommitBlockListHeaders = { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, 
lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobCommitBlockListExceptionHeaders = { serializedName: "BlockBlob_commitBlockListExceptionHeaders", @@ -8105,11 +8107,11 @@ export const BlockBlobCommitBlockListExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - 
name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobGetBlockListHeaders = { serializedName: "BlockBlob_getBlockListHeaders", @@ -8121,67 +8123,67 @@ export const BlockBlobGetBlockListHeaders = { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; export const BlockBlobGetBlockListExceptionHeaders = { serializedName: "BlockBlob_getBlockListExceptionHeaders", @@ -8193,10 +8195,10 @@ export const BlockBlobGetBlockListExceptionHeaders = { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; //# sourceMappingURL=mappers.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js index 99de150a4..11c9df53f 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/models/parameters.js @@ -5,8 +5,7 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import { QueryCollectionFormat } from "@azure/core-http"; -import { BlobServiceProperties as BlobServicePropertiesMapper, KeyInfo as KeyInfoMapper, QueryRequest as QueryRequestMapper, BlobTags as BlobTagsMapper, BlockLookupList as BlockLookupListMapper } from "../models/mappers"; +import { BlobServiceProperties as BlobServicePropertiesMapper, KeyInfo as KeyInfoMapper, QueryRequest as QueryRequestMapper, BlobTags as BlobTagsMapper, BlockLookupList as BlockLookupListMapper, } from "../models/mappers"; export const contentType = { parameterPath: ["options", "contentType"], mapper: { @@ -14,13 +13,13 @@ export const contentType = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobServiceProperties = { parameterPath: "blobServiceProperties", - mapper: BlobServicePropertiesMapper + mapper: BlobServicePropertiesMapper, }; export const accept = { parameterPath: "accept", @@ -29,9 +28,9 @@ export const accept = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const url = { parameterPath: "url", @@ -40,10 +39,10 @@ export const url = { required: true, xmlName: "url", type: { - name: "String" - } + name: "String", + }, }, - skipEncoding: true + skipEncoding: true, }; export const restype = { parameterPath: "restype", @@ -52,9 +51,9 @@ export const restype = { isConstant: true, serializedName: "restype", type: { - name: "String" - } - } + name: 
"String", + }, + }, }; export const comp = { parameterPath: "comp", @@ -63,33 +62,33 @@ export const comp = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const timeoutInSeconds = { parameterPath: ["options", "timeoutInSeconds"], mapper: { constraints: { - InclusiveMinimum: 0 + InclusiveMinimum: 0, }, serializedName: "timeout", xmlName: "timeout", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const version = { parameterPath: "version", mapper: { - defaultValue: "2023-11-03", + defaultValue: "2024-05-04", isConstant: true, serializedName: "x-ms-version", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const requestId = { parameterPath: ["options", "requestId"], @@ -97,9 +96,9 @@ export const requestId = { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const accept1 = { parameterPath: "accept", @@ -108,9 +107,9 @@ export const accept1 = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp1 = { parameterPath: "comp", @@ -119,9 +118,9 @@ export const comp1 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp2 = { parameterPath: "comp", @@ -130,9 +129,9 @@ export const comp2 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const prefix = { parameterPath: ["options", "prefix"], @@ -140,9 +139,9 @@ export const prefix = { serializedName: "prefix", xmlName: "prefix", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const marker = { parameterPath: ["options", "marker"], @@ -150,22 +149,22 @@ export const marker = { serializedName: "marker", xmlName: "marker", type: { - name: "String" - } - } + name: "String", + }, + }, }; 
export const maxPageSize = { parameterPath: ["options", "maxPageSize"], mapper: { constraints: { - InclusiveMinimum: 1 + InclusiveMinimum: 1, }, serializedName: "maxresults", xmlName: "maxresults", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const include = { parameterPath: ["options", "include"], @@ -178,16 +177,16 @@ export const include = { element: { type: { name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } - } + allowedValues: ["metadata", "deleted", "system"], + }, + }, + }, }, - collectionFormat: QueryCollectionFormat.Csv + collectionFormat: "CSV", }; export const keyInfo = { parameterPath: "keyInfo", - mapper: KeyInfoMapper + mapper: KeyInfoMapper, }; export const comp3 = { parameterPath: "comp", @@ -196,9 +195,9 @@ export const comp3 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const restype1 = { parameterPath: "restype", @@ -207,9 +206,9 @@ export const restype1 = { isConstant: true, serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const body = { parameterPath: "body", @@ -218,9 +217,9 @@ export const body = { required: true, xmlName: "body", type: { - name: "Stream" - } - } + name: "Stream", + }, + }, }; export const comp4 = { parameterPath: "comp", @@ -229,9 +228,9 @@ export const comp4 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const contentLength = { parameterPath: "contentLength", @@ -240,9 +239,9 @@ export const contentLength = { required: true, xmlName: "Content-Length", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const multipartContentType = { parameterPath: "multipartContentType", @@ -251,9 +250,9 @@ export const multipartContentType = { required: true, xmlName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp5 = { parameterPath: 
"comp", @@ -262,9 +261,9 @@ export const comp5 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const where = { parameterPath: ["options", "where"], @@ -272,9 +271,9 @@ export const where = { serializedName: "where", xmlName: "where", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const restype2 = { parameterPath: "restype", @@ -283,21 +282,21 @@ export const restype2 = { isConstant: true, serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const metadata = { parameterPath: ["options", "metadata"], mapper: { serializedName: "x-ms-meta", xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", type: { name: "Dictionary", - value: { type: { name: "String" } } + value: { type: { name: "String" } }, }, - headerCollectionPrefix: "x-ms-meta-" - } + }, }; export const access = { parameterPath: ["options", "access"], @@ -306,37 +305,37 @@ export const access = { xmlName: "x-ms-blob-public-access", type: { name: "Enum", - allowedValues: ["container", "blob"] - } - } + allowedValues: ["container", "blob"], + }, + }, }; export const defaultEncryptionScope = { parameterPath: [ "options", "containerEncryptionScope", - "defaultEncryptionScope" + "defaultEncryptionScope", ], mapper: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const preventEncryptionScopeOverride = { parameterPath: [ "options", "containerEncryptionScope", - "preventEncryptionScopeOverride" + "preventEncryptionScopeOverride", ], mapper: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const leaseId = { parameterPath: ["options", "leaseAccessConditions", "leaseId"], @@ -344,9 +343,9 @@ export const leaseId = { serializedName: 
"x-ms-lease-id", xmlName: "x-ms-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifModifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], @@ -354,9 +353,9 @@ export const ifModifiedSince = { serializedName: "If-Modified-Since", xmlName: "If-Modified-Since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; export const ifUnmodifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], @@ -364,9 +363,9 @@ export const ifUnmodifiedSince = { serializedName: "If-Unmodified-Since", xmlName: "If-Unmodified-Since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; export const comp6 = { parameterPath: "comp", @@ -375,9 +374,9 @@ export const comp6 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp7 = { parameterPath: "comp", @@ -386,9 +385,9 @@ export const comp7 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const containerAcl = { parameterPath: ["options", "containerAcl"], @@ -402,11 +401,11 @@ export const containerAcl = { element: { type: { name: "Composite", - className: "SignedIdentifier" - } - } - } - } + className: "SignedIdentifier", + }, + }, + }, + }, }; export const comp8 = { parameterPath: "comp", @@ -415,9 +414,9 @@ export const comp8 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const deletedContainerName = { parameterPath: ["options", "deletedContainerName"], @@ -425,9 +424,9 @@ export const deletedContainerName = { serializedName: "x-ms-deleted-container-name", xmlName: "x-ms-deleted-container-name", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const deletedContainerVersion = { parameterPath: ["options", "deletedContainerVersion"], @@ -435,9 
+434,9 @@ export const deletedContainerVersion = { serializedName: "x-ms-deleted-container-version", xmlName: "x-ms-deleted-container-version", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp9 = { parameterPath: "comp", @@ -446,9 +445,9 @@ export const comp9 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceContainerName = { parameterPath: "sourceContainerName", @@ -457,9 +456,9 @@ export const sourceContainerName = { required: true, xmlName: "x-ms-source-container-name", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceLeaseId = { parameterPath: ["options", "sourceLeaseId"], @@ -467,9 +466,9 @@ export const sourceLeaseId = { serializedName: "x-ms-source-lease-id", xmlName: "x-ms-source-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp10 = { parameterPath: "comp", @@ -478,9 +477,9 @@ export const comp10 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const action = { parameterPath: "action", @@ -489,9 +488,9 @@ export const action = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const duration = { parameterPath: ["options", "duration"], @@ -499,9 +498,9 @@ export const duration = { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const proposedLeaseId = { parameterPath: ["options", "proposedLeaseId"], @@ -509,9 +508,9 @@ export const proposedLeaseId = { serializedName: "x-ms-proposed-lease-id", xmlName: "x-ms-proposed-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const action1 = { parameterPath: "action", @@ -520,9 +519,9 @@ export const action1 = { isConstant: true, serializedName: "x-ms-lease-action", 
type: { - name: "String" - } - } + name: "String", + }, + }, }; export const leaseId1 = { parameterPath: "leaseId", @@ -531,9 +530,9 @@ export const leaseId1 = { required: true, xmlName: "x-ms-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const action2 = { parameterPath: "action", @@ -542,9 +541,9 @@ export const action2 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const action3 = { parameterPath: "action", @@ -553,9 +552,9 @@ export const action3 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const breakPeriod = { parameterPath: ["options", "breakPeriod"], @@ -563,9 +562,9 @@ export const breakPeriod = { serializedName: "x-ms-lease-break-period", xmlName: "x-ms-lease-break-period", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const action4 = { parameterPath: "action", @@ -574,9 +573,9 @@ export const action4 = { isConstant: true, serializedName: "x-ms-lease-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const proposedLeaseId1 = { parameterPath: "proposedLeaseId", @@ -585,9 +584,9 @@ export const proposedLeaseId1 = { required: true, xmlName: "x-ms-proposed-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const include1 = { parameterPath: ["options", "include"], @@ -610,13 +609,13 @@ export const include1 = { "tags", "immutabilitypolicy", "legalhold", - "deletedwithversions" - ] - } - } - } + "deletedwithversions", + ], + }, + }, + }, }, - collectionFormat: QueryCollectionFormat.Csv + collectionFormat: "CSV", }; export const delimiter = { parameterPath: "delimiter", @@ -625,9 +624,9 @@ export const delimiter = { required: true, xmlName: "delimiter", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const snapshot = { parameterPath: ["options", 
"snapshot"], @@ -635,9 +634,9 @@ export const snapshot = { serializedName: "snapshot", xmlName: "snapshot", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const versionId = { parameterPath: ["options", "versionId"], @@ -645,9 +644,9 @@ export const versionId = { serializedName: "versionid", xmlName: "versionid", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const range = { parameterPath: ["options", "range"], @@ -655,9 +654,9 @@ export const range = { serializedName: "x-ms-range", xmlName: "x-ms-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const rangeGetContentMD5 = { parameterPath: ["options", "rangeGetContentMD5"], @@ -665,9 +664,9 @@ export const rangeGetContentMD5 = { serializedName: "x-ms-range-get-content-md5", xmlName: "x-ms-range-get-content-md5", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const rangeGetContentCRC64 = { parameterPath: ["options", "rangeGetContentCRC64"], @@ -675,9 +674,9 @@ export const rangeGetContentCRC64 = { serializedName: "x-ms-range-get-content-crc64", xmlName: "x-ms-range-get-content-crc64", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const encryptionKey = { parameterPath: ["options", "cpkInfo", "encryptionKey"], @@ -685,9 +684,9 @@ export const encryptionKey = { serializedName: "x-ms-encryption-key", xmlName: "x-ms-encryption-key", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const encryptionKeySha256 = { parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], @@ -695,9 +694,9 @@ export const encryptionKeySha256 = { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const encryptionAlgorithm = { parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], @@ -705,9 +704,9 @@ export const encryptionAlgorithm = { serializedName: "x-ms-encryption-algorithm", 
xmlName: "x-ms-encryption-algorithm", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], @@ -715,9 +714,9 @@ export const ifMatch = { serializedName: "If-Match", xmlName: "If-Match", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifNoneMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], @@ -725,9 +724,9 @@ export const ifNoneMatch = { serializedName: "If-None-Match", xmlName: "If-None-Match", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifTags = { parameterPath: ["options", "modifiedAccessConditions", "ifTags"], @@ -735,9 +734,9 @@ export const ifTags = { serializedName: "x-ms-if-tags", xmlName: "x-ms-if-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], @@ -746,9 +745,9 @@ export const deleteSnapshots = { xmlName: "x-ms-delete-snapshots", type: { name: "Enum", - allowedValues: ["include", "only"] - } - } + allowedValues: ["include", "only"], + }, + }, }; export const blobDeleteType = { parameterPath: ["options", "blobDeleteType"], @@ -756,9 +755,9 @@ export const blobDeleteType = { serializedName: "deletetype", xmlName: "deletetype", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp11 = { parameterPath: "comp", @@ -767,9 +766,9 @@ export const comp11 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const expiryOptions = { parameterPath: "expiryOptions", @@ -778,9 +777,9 @@ export const expiryOptions = { required: true, xmlName: "x-ms-expiry-option", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const expiresOn = { parameterPath: ["options", "expiresOn"], @@ -788,9 +787,9 @@ export const expiresOn = { serializedName: "x-ms-expiry-time", xmlName: 
"x-ms-expiry-time", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobCacheControl = { parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], @@ -798,9 +797,9 @@ export const blobCacheControl = { serializedName: "x-ms-blob-cache-control", xmlName: "x-ms-blob-cache-control", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobContentType = { parameterPath: ["options", "blobHttpHeaders", "blobContentType"], @@ -808,9 +807,9 @@ export const blobContentType = { serializedName: "x-ms-blob-content-type", xmlName: "x-ms-blob-content-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobContentMD5 = { parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], @@ -818,9 +817,9 @@ export const blobContentMD5 = { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; export const blobContentEncoding = { parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], @@ -828,9 +827,9 @@ export const blobContentEncoding = { serializedName: "x-ms-blob-content-encoding", xmlName: "x-ms-blob-content-encoding", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobContentLanguage = { parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], @@ -838,9 +837,9 @@ export const blobContentLanguage = { serializedName: "x-ms-blob-content-language", xmlName: "x-ms-blob-content-language", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobContentDisposition = { parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], @@ -848,9 +847,9 @@ export const blobContentDisposition = { serializedName: "x-ms-blob-content-disposition", xmlName: "x-ms-blob-content-disposition", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp12 = { parameterPath: "comp", @@ -859,9 +858,9 @@ export 
const comp12 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const immutabilityPolicyExpiry = { parameterPath: ["options", "immutabilityPolicyExpiry"], @@ -869,9 +868,9 @@ export const immutabilityPolicyExpiry = { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; export const immutabilityPolicyMode = { parameterPath: ["options", "immutabilityPolicyMode"], @@ -880,9 +879,9 @@ export const immutabilityPolicyMode = { xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, }; export const comp13 = { parameterPath: "comp", @@ -891,9 +890,9 @@ export const comp13 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const legalHold = { parameterPath: "legalHold", @@ -902,9 +901,9 @@ export const legalHold = { required: true, xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const encryptionScope = { parameterPath: ["options", "encryptionScope"], @@ -912,9 +911,9 @@ export const encryptionScope = { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp14 = { parameterPath: "comp", @@ -923,9 +922,9 @@ export const comp14 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const tier = { parameterPath: ["options", "tier"], @@ -949,10 +948,10 @@ export const tier = { "Hot", "Cool", "Archive", - "Cold" - ] - } - } + "Cold", + ], + }, + }, }; export const rehydratePriority = { parameterPath: ["options", "rehydratePriority"], @@ -961,37 +960,37 @@ export const 
rehydratePriority = { xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } - } + allowedValues: ["High", "Standard"], + }, + }, }; export const sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfModifiedSince" + "sourceIfModifiedSince", ], mapper: { serializedName: "x-ms-source-if-modified-since", xmlName: "x-ms-source-if-modified-since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; export const sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" + "sourceIfUnmodifiedSince", ], mapper: { serializedName: "x-ms-source-if-unmodified-since", xmlName: "x-ms-source-if-unmodified-since", type: { - name: "DateTimeRfc1123" - } - } + name: "DateTimeRfc1123", + }, + }, }; export const sourceIfMatch = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], @@ -999,23 +998,23 @@ export const sourceIfMatch = { serializedName: "x-ms-source-if-match", xmlName: "x-ms-source-if-match", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", - "sourceIfNoneMatch" + "sourceIfNoneMatch", ], mapper: { serializedName: "x-ms-source-if-none-match", xmlName: "x-ms-source-if-none-match", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceIfTags = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], @@ -1023,9 +1022,9 @@ export const sourceIfTags = { serializedName: "x-ms-source-if-tags", xmlName: "x-ms-source-if-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const copySource = { parameterPath: "copySource", @@ -1034,9 +1033,9 @@ export const copySource = { required: true, xmlName: "x-ms-copy-source", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const 
blobTagsString = { parameterPath: ["options", "blobTagsString"], @@ -1044,9 +1043,9 @@ export const blobTagsString = { serializedName: "x-ms-tags", xmlName: "x-ms-tags", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sealBlob = { parameterPath: ["options", "sealBlob"], @@ -1054,9 +1053,9 @@ export const sealBlob = { serializedName: "x-ms-seal-blob", xmlName: "x-ms-seal-blob", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const legalHold1 = { parameterPath: ["options", "legalHold"], @@ -1064,9 +1063,9 @@ export const legalHold1 = { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const xMsRequiresSync = { parameterPath: "xMsRequiresSync", @@ -1075,9 +1074,9 @@ export const xMsRequiresSync = { isConstant: true, serializedName: "x-ms-requires-sync", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], @@ -1085,9 +1084,9 @@ export const sourceContentMD5 = { serializedName: "x-ms-source-content-md5", xmlName: "x-ms-source-content-md5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; export const copySourceAuthorization = { parameterPath: ["options", "copySourceAuthorization"], @@ -1095,9 +1094,9 @@ export const copySourceAuthorization = { serializedName: "x-ms-copy-source-authorization", xmlName: "x-ms-copy-source-authorization", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const copySourceTags = { parameterPath: ["options", "copySourceTags"], @@ -1106,9 +1105,9 @@ export const copySourceTags = { xmlName: "x-ms-copy-source-tag-option", type: { name: "Enum", - allowedValues: ["REPLACE", "COPY"] - } - } + allowedValues: ["REPLACE", "COPY"], + }, + }, }; export const comp15 = { parameterPath: "comp", @@ -1117,9 +1116,9 @@ export const comp15 = { isConstant: true, serializedName: "comp", type: 
{ - name: "String" - } - } + name: "String", + }, + }, }; export const copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", @@ -1128,9 +1127,9 @@ export const copyActionAbortConstant = { isConstant: true, serializedName: "x-ms-copy-action", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const copyId = { parameterPath: "copyId", @@ -1139,9 +1138,9 @@ export const copyId = { required: true, xmlName: "copyid", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp16 = { parameterPath: "comp", @@ -1150,9 +1149,9 @@ export const comp16 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const tier1 = { parameterPath: "tier", @@ -1177,14 +1176,14 @@ export const tier1 = { "Hot", "Cool", "Archive", - "Cold" - ] - } - } + "Cold", + ], + }, + }, }; export const queryRequest = { parameterPath: ["options", "queryRequest"], - mapper: QueryRequestMapper + mapper: QueryRequestMapper, }; export const comp17 = { parameterPath: "comp", @@ -1193,9 +1192,9 @@ export const comp17 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp18 = { parameterPath: "comp", @@ -1204,13 +1203,13 @@ export const comp18 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const tags = { parameterPath: ["options", "tags"], - mapper: BlobTagsMapper + mapper: BlobTagsMapper, }; export const transactionalContentMD5 = { parameterPath: ["options", "transactionalContentMD5"], @@ -1218,9 +1217,9 @@ export const transactionalContentMD5 = { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; export const transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], @@ -1228,9 +1227,9 @@ export const transactionalContentCrc64 = { serializedName: 
"x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; export const blobType = { parameterPath: "blobType", @@ -1239,9 +1238,9 @@ export const blobType = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobContentLength = { parameterPath: "blobContentLength", @@ -1250,19 +1249,20 @@ export const blobContentLength = { required: true, xmlName: "x-ms-blob-content-length", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { + defaultValue: 0, serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const contentType1 = { parameterPath: ["options", "contentType"], @@ -1271,9 +1271,9 @@ export const contentType1 = { isConstant: true, serializedName: "Content-Type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const body1 = { parameterPath: "body", @@ -1282,9 +1282,9 @@ export const body1 = { required: true, xmlName: "body", type: { - name: "Stream" - } - } + name: "Stream", + }, + }, }; export const accept2 = { parameterPath: "accept", @@ -1293,9 +1293,9 @@ export const accept2 = { isConstant: true, serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp19 = { parameterPath: "comp", @@ -1304,9 +1304,9 @@ export const comp19 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const pageWrite = { parameterPath: "pageWrite", @@ -1315,51 +1315,51 @@ export const pageWrite = { isConstant: true, serializedName: "x-ms-page-write", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", 
"sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" + "ifSequenceNumberLessThanOrEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-le", xmlName: "x-ms-if-sequence-number-le", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" + "ifSequenceNumberLessThan", ], mapper: { serializedName: "x-ms-if-sequence-number-lt", xmlName: "x-ms-if-sequence-number-lt", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", - "ifSequenceNumberEqualTo" + "ifSequenceNumberEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-eq", xmlName: "x-ms-if-sequence-number-eq", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const pageWrite1 = { parameterPath: "pageWrite", @@ -1368,9 +1368,9 @@ export const pageWrite1 = { isConstant: true, serializedName: "x-ms-page-write", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceUrl = { parameterPath: "sourceUrl", @@ -1379,9 +1379,9 @@ export const sourceUrl = { required: true, xmlName: "x-ms-copy-source", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceRange = { parameterPath: "sourceRange", @@ -1390,9 +1390,9 @@ export const sourceRange = { required: true, xmlName: "x-ms-source-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], @@ -1400,9 +1400,9 @@ export const sourceContentCrc64 = { serializedName: "x-ms-source-content-crc64", xmlName: "x-ms-source-content-crc64", type: { - name: "ByteArray" - } - } + name: "ByteArray", + }, + }, }; export const range1 = { parameterPath: "range", @@ -1411,9 +1411,9 @@ export const range1 = { required: true, xmlName: 
"x-ms-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp20 = { parameterPath: "comp", @@ -1422,9 +1422,9 @@ export const comp20 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const prevsnapshot = { parameterPath: ["options", "prevsnapshot"], @@ -1432,9 +1432,9 @@ export const prevsnapshot = { serializedName: "prevsnapshot", xmlName: "prevsnapshot", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], @@ -1442,9 +1442,9 @@ export const prevSnapshotUrl = { serializedName: "x-ms-previous-snapshot-url", xmlName: "x-ms-previous-snapshot-url", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const sequenceNumberAction = { parameterPath: "sequenceNumberAction", @@ -1454,9 +1454,9 @@ export const sequenceNumberAction = { xmlName: "x-ms-sequence-number-action", type: { name: "Enum", - allowedValues: ["max", "update", "increment"] - } - } + allowedValues: ["max", "update", "increment"], + }, + }, }; export const comp21 = { parameterPath: "comp", @@ -1465,9 +1465,9 @@ export const comp21 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobType1 = { parameterPath: "blobType", @@ -1476,9 +1476,9 @@ export const blobType1 = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp22 = { parameterPath: "comp", @@ -1487,9 +1487,9 @@ export const comp22 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const maxSize = { parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], @@ -1497,23 +1497,23 @@ export const maxSize = { serializedName: "x-ms-blob-condition-maxsize", xmlName: "x-ms-blob-condition-maxsize", type: { - name: 
"Number" - } - } + name: "Number", + }, + }, }; export const appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", - "appendPosition" + "appendPosition", ], mapper: { serializedName: "x-ms-blob-condition-appendpos", xmlName: "x-ms-blob-condition-appendpos", type: { - name: "Number" - } - } + name: "Number", + }, + }, }; export const sourceRange1 = { parameterPath: ["options", "sourceRange"], @@ -1521,9 +1521,9 @@ export const sourceRange1 = { serializedName: "x-ms-source-range", xmlName: "x-ms-source-range", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const comp23 = { parameterPath: "comp", @@ -1532,9 +1532,9 @@ export const comp23 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blobType2 = { parameterPath: "blobType", @@ -1543,9 +1543,9 @@ export const blobType2 = { isConstant: true, serializedName: "x-ms-blob-type", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], @@ -1553,9 +1553,9 @@ export const copySourceBlobProperties = { serializedName: "x-ms-copy-source-blob-properties", xmlName: "x-ms-copy-source-blob-properties", type: { - name: "Boolean" - } - } + name: "Boolean", + }, + }, }; export const comp24 = { parameterPath: "comp", @@ -1564,9 +1564,9 @@ export const comp24 = { isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blockId = { parameterPath: "blockId", @@ -1575,13 +1575,13 @@ export const blockId = { required: true, xmlName: "blockid", type: { - name: "String" - } - } + name: "String", + }, + }, }; export const blocks = { parameterPath: "blocks", - mapper: BlockLookupListMapper + mapper: BlockLookupListMapper, }; export const comp25 = { parameterPath: "comp", @@ -1590,9 +1590,9 @@ export const comp25 = { isConstant: true, serializedName: "comp", 
type: { - name: "String" - } - } + name: "String", + }, + }, }; export const listType = { parameterPath: "listType", @@ -1603,8 +1603,8 @@ export const listType = { xmlName: "blocklisttype", type: { name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] - } - } + allowedValues: ["committed", "uncommitted", "all"], + }, + }, }; //# sourceMappingURL=parameters.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js index 7507b1557..d03d327b0 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/appendBlob.js @@ -5,11 +5,11 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; -/** Class representing a AppendBlob. */ -export class AppendBlob { +/** Class containing AppendBlob operations. */ +export class AppendBlobImpl { /** * Initialize a new instance of the class AppendBlob class. * @param client Reference to the service client @@ -23,11 +23,7 @@ export class AppendBlob { * @param options The options parameters. 
*/ create(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. The @@ -38,12 +34,7 @@ export class AppendBlob { * @param options The options parameters. */ appendBlock(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob where @@ -55,12 +46,7 @@ export class AppendBlob { * @param options The options parameters. */ appendBlockFromUrl(sourceUrl, contentLength, options) { - const operationArguments = { - sourceUrl, - contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } /** * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version @@ -68,26 +54,22 @@ export class AppendBlob { * @param options The options parameters. 
*/ seal(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, sealOperationSpec); + return this.client.sendOperationRequest({ options }, sealOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true); const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.AppendBlobCreateHeaders + headersMapper: Mappers.AppendBlobCreateHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobCreateExceptionHeaders - } + headersMapper: Mappers.AppendBlobCreateExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -117,22 +99,22 @@ const createOperationSpec = { Parameters.encryptionScope, Parameters.blobTagsString, Parameters.legalHold1, - Parameters.blobType1 + Parameters.blobType1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.AppendBlobAppendBlockHeaders + headersMapper: Mappers.AppendBlobAppendBlockHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders - } + headersMapper: Mappers.AppendBlobAppendBlockExceptionHeaders, + }, }, requestBody: Parameters.body1, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], @@ -156,22 +138,24 @@ const appendBlockOperationSpec = { Parameters.contentType1, Parameters.accept2, Parameters.maxSize, - Parameters.appendPosition + Parameters.appendPosition, ], + isXML: true, + contentType: "application/xml; 
charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders + headersMapper: Mappers.AppendBlobAppendBlockFromUrlHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders - } + headersMapper: Mappers.AppendBlobAppendBlockFromUrlExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp22], urlParameters: [Parameters.url], @@ -201,22 +185,22 @@ const appendBlockFromUrlOperationSpec = { Parameters.sourceContentCrc64, Parameters.maxSize, Parameters.appendPosition, - Parameters.sourceRange1 + Parameters.sourceRange1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.AppendBlobSealHeaders + headersMapper: Mappers.AppendBlobSealHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.AppendBlobSealExceptionHeaders - } + headersMapper: Mappers.AppendBlobSealExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp23], urlParameters: [Parameters.url], @@ -229,9 +213,9 @@ const sealOperationSpec = { Parameters.ifUnmodifiedSince, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.appendPosition + Parameters.appendPosition, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; //# sourceMappingURL=appendBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js index 6810389a7..5c19af71c 100644 --- 
a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blob.js @@ -5,11 +5,11 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; -/** Class representing a Blob. */ -export class Blob { +/** Class containing Blob operations. */ +export class BlobImpl { /** * Initialize a new instance of the class Blob class. * @param client Reference to the service client @@ -23,10 +23,7 @@ export class Blob { * @param options The options parameters. */ download(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } /** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system @@ -34,10 +31,7 @@ export class Blob { * @param options The options parameters. */ getProperties(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -55,20 +49,14 @@ export class Blob { * @param options The options parameters. 
*/ delete(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ undelete(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } /** * Sets the time a blob will expire and be deleted. @@ -76,41 +64,28 @@ export class Blob { * @param options The options parameters. */ setExpiry(expiryOptions, options) { - const operationArguments = { - expiryOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } /** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ setHttpHeaders(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } /** * The Set Immutability Policy operation sets the immutability policy on the blob * @param options The options parameters. 
*/ setImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } /** * The Delete Immutability Policy operation deletes the immutability policy on the blob * @param options The options parameters. */ deleteImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } /** * The Set Legal Hold operation sets a legal hold on the blob. @@ -118,11 +93,7 @@ export class Blob { * @param options The options parameters. */ setLegalHold(legalHold, options) { - const operationArguments = { - legalHold, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } /** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more @@ -130,10 +101,7 @@ export class Blob { * @param options The options parameters. */ setMetadata(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -141,10 +109,7 @@ export class Blob { * @param options The options parameters. 
*/ acquireLease(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -153,11 +118,7 @@ export class Blob { * @param options The options parameters. */ releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -166,11 +127,7 @@ export class Blob { * @param options The options parameters. */ renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -182,12 +139,7 @@ export class Blob { * @param options The options parameters. 
*/ changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -195,20 +147,14 @@ export class Blob { * @param options The options parameters. */ breakLease(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ createSnapshot(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. @@ -219,11 +165,7 @@ export class Blob { * @param options The options parameters. */ startCopyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); } /** * The Copy From URL operation copies a blob or an internet resource to a new blob. 
It will not return @@ -235,11 +177,7 @@ export class Blob { * @param options The options parameters. */ copyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); } /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination @@ -249,11 +187,7 @@ export class Blob { * @param options The options parameters. */ abortCopyFromURL(copyId, options) { - const operationArguments = { - copyId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } /** * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium @@ -265,21 +199,14 @@ export class Blob { * @param options The options parameters. */ setTier(tier, options) { - const operationArguments = { - tier, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. 
*/ getAccountInfo(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -287,34 +214,25 @@ export class Blob { * @param options The options parameters. */ query(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, queryOperationSpec); + return this.client.sendOperationRequest({ options }, queryOperationSpec); } /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ getTags(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); } /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. 
*/ setTags(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -322,26 +240,26 @@ const downloadOperationSpec = { 200: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: Mappers.BlobDownloadHeaders + headersMapper: Mappers.BlobDownloadHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: Mappers.BlobDownloadHeaders + headersMapper: Mappers.BlobDownloadHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobDownloadExceptionHeaders - } + headersMapper: Mappers.BlobDownloadExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, - Parameters.versionId + Parameters.versionId, ], urlParameters: [Parameters.url], headerParameters: [ @@ -359,27 +277,27 @@ const downloadOperationSpec = { Parameters.encryptionAlgorithm, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { 200: { - headersMapper: Mappers.BlobGetPropertiesHeaders + headersMapper: Mappers.BlobGetPropertiesHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobGetPropertiesExceptionHeaders - } + headersMapper: 
Mappers.BlobGetPropertiesExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, - Parameters.versionId + Parameters.versionId, ], urlParameters: [Parameters.url], headerParameters: [ @@ -394,28 +312,28 @@ const getPropertiesOperationSpec = { Parameters.encryptionAlgorithm, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 202: { - headersMapper: Mappers.BlobDeleteHeaders + headersMapper: Mappers.BlobDeleteHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobDeleteExceptionHeaders - } + headersMapper: Mappers.BlobDeleteExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, Parameters.versionId, - Parameters.blobDeleteType + Parameters.blobDeleteType, ], urlParameters: [Parameters.url], headerParameters: [ @@ -428,44 +346,44 @@ const deleteOperationSpec = { Parameters.ifMatch, Parameters.ifNoneMatch, Parameters.ifTags, - Parameters.deleteSnapshots + Parameters.deleteSnapshots, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobUndeleteHeaders + headersMapper: Mappers.BlobUndeleteHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobUndeleteExceptionHeaders - } + headersMapper: Mappers.BlobUndeleteExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp8], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", 
httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobSetExpiryHeaders + headersMapper: Mappers.BlobSetExpiryHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetExpiryExceptionHeaders - } + headersMapper: Mappers.BlobSetExpiryExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp11], urlParameters: [Parameters.url], @@ -474,22 +392,22 @@ const setExpiryOperationSpec = { Parameters.requestId, Parameters.accept1, Parameters.expiryOptions, - Parameters.expiresOn + Parameters.expiresOn, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobSetHttpHeadersHeaders + headersMapper: Mappers.BlobSetHttpHeadersHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders - } + headersMapper: Mappers.BlobSetHttpHeadersExceptionHeaders, + }, }, queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -508,22 +426,22 @@ const setHttpHeadersOperationSpec = { Parameters.blobContentMD5, Parameters.blobContentEncoding, Parameters.blobContentLanguage, - Parameters.blobContentDisposition + Parameters.blobContentDisposition, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders + headersMapper: Mappers.BlobSetImmutabilityPolicyHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders - } + headersMapper: Mappers.BlobSetImmutabilityPolicyExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], urlParameters: [Parameters.url], @@ -533,44 +451,44 @@ const 
setImmutabilityPolicyOperationSpec = { Parameters.accept1, Parameters.ifUnmodifiedSince, Parameters.immutabilityPolicyExpiry, - Parameters.immutabilityPolicyMode + Parameters.immutabilityPolicyMode, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 200: { - headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders + headersMapper: Mappers.BlobDeleteImmutabilityPolicyHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders - } + headersMapper: Mappers.BlobDeleteImmutabilityPolicyExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp12], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobSetLegalHoldHeaders + headersMapper: Mappers.BlobSetLegalHoldHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders - } + headersMapper: Mappers.BlobSetLegalHoldExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp13], urlParameters: [Parameters.url], @@ -578,22 +496,22 @@ const setLegalHoldOperationSpec = { Parameters.version, Parameters.requestId, Parameters.accept1, - Parameters.legalHold + Parameters.legalHold, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobSetMetadataHeaders + headersMapper: Mappers.BlobSetMetadataHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: 
Mappers.BlobSetMetadataExceptionHeaders - } + headersMapper: Mappers.BlobSetMetadataExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp6], urlParameters: [Parameters.url], @@ -611,22 +529,22 @@ const setMetadataOperationSpec = { Parameters.ifMatch, Parameters.ifNoneMatch, Parameters.ifTags, - Parameters.encryptionScope + Parameters.encryptionScope, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.BlobAcquireLeaseHeaders + headersMapper: Mappers.BlobAcquireLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders - } + headersMapper: Mappers.BlobAcquireLeaseExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], urlParameters: [Parameters.url], @@ -641,22 +559,22 @@ const acquireLeaseOperationSpec = { Parameters.proposedLeaseId, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobReleaseLeaseHeaders + headersMapper: Mappers.BlobReleaseLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders - } + headersMapper: Mappers.BlobReleaseLeaseExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], urlParameters: [Parameters.url], @@ -670,22 +588,22 @@ const releaseLeaseOperationSpec = { Parameters.leaseId1, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const renewLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: 
"PUT", responses: { 200: { - headersMapper: Mappers.BlobRenewLeaseHeaders + headersMapper: Mappers.BlobRenewLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobRenewLeaseExceptionHeaders - } + headersMapper: Mappers.BlobRenewLeaseExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], urlParameters: [Parameters.url], @@ -699,22 +617,22 @@ const renewLeaseOperationSpec = { Parameters.action2, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobChangeLeaseHeaders + headersMapper: Mappers.BlobChangeLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobChangeLeaseExceptionHeaders - } + headersMapper: Mappers.BlobChangeLeaseExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], urlParameters: [Parameters.url], @@ -729,22 +647,22 @@ const changeLeaseOperationSpec = { Parameters.proposedLeaseId1, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: Mappers.BlobBreakLeaseHeaders + headersMapper: Mappers.BlobBreakLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobBreakLeaseExceptionHeaders - } + headersMapper: Mappers.BlobBreakLeaseExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp10], urlParameters: [Parameters.url], @@ -758,22 +676,22 @@ const breakLeaseOperationSpec = { Parameters.breakPeriod, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], 
isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.BlobCreateSnapshotHeaders + headersMapper: Mappers.BlobCreateSnapshotHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders - } + headersMapper: Mappers.BlobCreateSnapshotExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp14], urlParameters: [Parameters.url], @@ -791,22 +709,22 @@ const createSnapshotOperationSpec = { Parameters.ifMatch, Parameters.ifNoneMatch, Parameters.ifTags, - Parameters.encryptionScope + Parameters.encryptionScope, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: Mappers.BlobStartCopyFromURLHeaders + headersMapper: Mappers.BlobStartCopyFromURLHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders - } + headersMapper: Mappers.BlobStartCopyFromURLExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -833,22 +751,22 @@ const startCopyFromURLOperationSpec = { Parameters.copySource, Parameters.blobTagsString, Parameters.sealBlob, - Parameters.legalHold1 + Parameters.legalHold1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: Mappers.BlobCopyFromURLHeaders + headersMapper: Mappers.BlobCopyFromURLHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobCopyFromURLExceptionHeaders - } + headersMapper: Mappers.BlobCopyFromURLExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds], 
urlParameters: [Parameters.url], @@ -877,27 +795,27 @@ const copyFromURLOperationSpec = { Parameters.xMsRequiresSync, Parameters.sourceContentMD5, Parameters.copySourceAuthorization, - Parameters.copySourceTags + Parameters.copySourceTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { - headersMapper: Mappers.BlobAbortCopyFromURLHeaders + headersMapper: Mappers.BlobAbortCopyFromURLHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders - } + headersMapper: Mappers.BlobAbortCopyFromURLExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.comp15, - Parameters.copyId + Parameters.copyId, ], urlParameters: [Parameters.url], headerParameters: [ @@ -905,31 +823,31 @@ const abortCopyFromURLOperationSpec = { Parameters.requestId, Parameters.accept1, Parameters.leaseId, - Parameters.copyActionAbortConstant + Parameters.copyActionAbortConstant, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.BlobSetTierHeaders + headersMapper: Mappers.BlobSetTierHeaders, }, 202: { - headersMapper: Mappers.BlobSetTierHeaders + headersMapper: Mappers.BlobSetTierHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetTierExceptionHeaders - } + headersMapper: Mappers.BlobSetTierExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, Parameters.versionId, - Parameters.comp16 + Parameters.comp16, ], urlParameters: [Parameters.url], headerParameters: [ @@ -939,28 +857,28 @@ const setTierOperationSpec = { Parameters.leaseId, Parameters.ifTags, Parameters.rehydratePriority, - Parameters.tier1 + Parameters.tier1, ], isXML: true, - serializer: 
xmlSerializer + serializer: xmlSerializer, }; const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { - headersMapper: Mappers.BlobGetAccountInfoHeaders + headersMapper: Mappers.BlobGetAccountInfoHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders - } + headersMapper: Mappers.BlobGetAccountInfoExceptionHeaders, + }, }, queryParameters: [Parameters.comp, Parameters.restype1], urlParameters: [Parameters.url], headerParameters: [Parameters.version, Parameters.accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -969,27 +887,27 @@ const queryOperationSpec = { 200: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: Mappers.BlobQueryHeaders + headersMapper: Mappers.BlobQueryHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: Mappers.BlobQueryHeaders + headersMapper: Mappers.BlobQueryHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobQueryExceptionHeaders - } + headersMapper: Mappers.BlobQueryExceptionHeaders, + }, }, requestBody: Parameters.queryRequest, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, - Parameters.comp17 + Parameters.comp17, ], urlParameters: [Parameters.url], headerParameters: [ @@ -1005,12 +923,12 @@ const queryOperationSpec = { Parameters.encryptionAlgorithm, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -1018,18 +936,18 @@ const getTagsOperationSpec = { responses: { 
200: { bodyMapper: Mappers.BlobTags, - headersMapper: Mappers.BlobGetTagsHeaders + headersMapper: Mappers.BlobGetTagsHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobGetTagsExceptionHeaders - } + headersMapper: Mappers.BlobGetTagsExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, Parameters.versionId, - Parameters.comp18 + Parameters.comp18, ], urlParameters: [Parameters.url], headerParameters: [ @@ -1037,28 +955,28 @@ const getTagsOperationSpec = { Parameters.requestId, Parameters.accept1, Parameters.leaseId, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { - headersMapper: Mappers.BlobSetTagsHeaders + headersMapper: Mappers.BlobSetTagsHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlobSetTagsExceptionHeaders - } + headersMapper: Mappers.BlobSetTagsExceptionHeaders, + }, }, requestBody: Parameters.tags, queryParameters: [ Parameters.timeoutInSeconds, Parameters.versionId, - Parameters.comp18 + Parameters.comp18, ], urlParameters: [Parameters.url], headerParameters: [ @@ -1069,11 +987,11 @@ const setTagsOperationSpec = { Parameters.leaseId, Parameters.ifTags, Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64 + Parameters.transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; //# sourceMappingURL=blob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js index 266219b49..10ac88c28 100644 --- 
a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/blockBlob.js @@ -5,11 +5,11 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; -/** Class representing a BlockBlob. */ -export class BlockBlob { +/** Class containing BlockBlob operations. */ +export class BlockBlobImpl { /** * Initialize a new instance of the class BlockBlob class. * @param client Reference to the service client @@ -27,12 +27,7 @@ export class BlockBlob { * @param options The options parameters. */ upload(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read @@ -48,12 +43,7 @@ export class BlockBlob { * @param options The options parameters. 
*/ putBlobFromUrl(contentLength, copySource, options) { - const operationArguments = { - contentLength, - copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob @@ -65,13 +55,7 @@ export class BlockBlob { * @param options The options parameters. */ stageBlock(blockId, contentLength, body, options) { - const operationArguments = { - blockId, - contentLength, - body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents @@ -84,13 +68,7 @@ export class BlockBlob { * @param options The options parameters. */ stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - const operationArguments = { - blockId, - contentLength, - sourceUrl, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the @@ -104,11 +82,7 @@ export class BlockBlob { * @param options The options parameters. 
*/ commitBlockList(blocks, options) { - const operationArguments = { - blocks, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block @@ -118,27 +92,22 @@ export class BlockBlob { * @param options The options parameters. */ getBlockList(listType, options) { - const operationArguments = { - listType, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.BlockBlobUploadHeaders + headersMapper: Mappers.BlockBlobUploadHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobUploadExceptionHeaders - } + headersMapper: Mappers.BlockBlobUploadExceptionHeaders, + }, }, requestBody: Parameters.body1, queryParameters: [Parameters.timeoutInSeconds], @@ -173,22 +142,24 @@ const uploadOperationSpec = { Parameters.transactionalContentCrc64, Parameters.contentType1, Parameters.accept2, - Parameters.blobType2 + Parameters.blobType2, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", 
responses: { 201: { - headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders + headersMapper: Mappers.BlockBlobPutBlobFromUrlHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders - } + headersMapper: Mappers.BlockBlobPutBlobFromUrlExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -227,28 +198,28 @@ const putBlobFromUrlOperationSpec = { Parameters.copySourceTags, Parameters.transactionalContentMD5, Parameters.blobType2, - Parameters.copySourceBlobProperties + Parameters.copySourceBlobProperties, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.BlockBlobStageBlockHeaders + headersMapper: Mappers.BlockBlobStageBlockHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders - } + headersMapper: Mappers.BlockBlobStageBlockExceptionHeaders, + }, }, requestBody: Parameters.body1, queryParameters: [ Parameters.timeoutInSeconds, Parameters.comp24, - Parameters.blockId + Parameters.blockId, ], urlParameters: [Parameters.url], headerParameters: [ @@ -263,27 +234,29 @@ const stageBlockOperationSpec = { Parameters.transactionalContentMD5, Parameters.transactionalContentCrc64, Parameters.contentType1, - Parameters.accept2 + Parameters.accept2, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders + headersMapper: Mappers.BlockBlobStageBlockFromURLHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobStageBlockFromURLExceptionHeaders - } + headersMapper: 
Mappers.BlockBlobStageBlockFromURLExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.comp24, - Parameters.blockId + Parameters.blockId, ], urlParameters: [Parameters.url], headerParameters: [ @@ -304,22 +277,22 @@ const stageBlockFromURLOperationSpec = { Parameters.copySourceAuthorization, Parameters.sourceUrl, Parameters.sourceContentCrc64, - Parameters.sourceRange1 + Parameters.sourceRange1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.BlockBlobCommitBlockListHeaders + headersMapper: Mappers.BlockBlobCommitBlockListHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders - } + headersMapper: Mappers.BlockBlobCommitBlockListExceptionHeaders, + }, }, requestBody: Parameters.blocks, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp25], @@ -352,12 +325,12 @@ const commitBlockListOperationSpec = { Parameters.blobTagsString, Parameters.legalHold1, Parameters.transactionalContentMD5, - Parameters.transactionalContentCrc64 + Parameters.transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -365,18 +338,18 @@ const getBlockListOperationSpec = { responses: { 200: { bodyMapper: Mappers.BlockList, - headersMapper: Mappers.BlockBlobGetBlockListHeaders + headersMapper: Mappers.BlockBlobGetBlockListHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders - } + headersMapper: Mappers.BlockBlobGetBlockListExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.snapshot, Parameters.comp25, - Parameters.listType + Parameters.listType, 
], urlParameters: [Parameters.url], headerParameters: [ @@ -384,9 +357,9 @@ const getBlockListOperationSpec = { Parameters.requestId, Parameters.accept1, Parameters.leaseId, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; //# sourceMappingURL=blockBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js index 17f5d4ca8..56eb860fc 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/container.js @@ -5,11 +5,11 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; -/** Class representing a Container. */ -export class Container { +/** Class containing Container operations. */ +export class ContainerImpl { /** * Initialize a new instance of the class Container class. * @param client Reference to the service client @@ -23,10 +23,7 @@ export class Container { * @param options The options parameters. */ create(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest({ options }, createOperationSpec); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -34,10 +31,7 @@ export class Container { * @param options The options parameters. 
*/ getProperties(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** * operation marks the specified container for deletion. The container and any blobs contained within @@ -45,20 +39,14 @@ export class Container { * @param options The options parameters. */ delete(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } /** * operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ setMetadata(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } /** * gets the permissions for the specified container. The permissions indicate whether container data @@ -66,10 +54,7 @@ export class Container { * @param options The options parameters. */ getAccessPolicy(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); } /** * sets the permissions for the specified container. The permissions indicate whether blobs in a @@ -77,20 +62,14 @@ export class Container { * @param options The options parameters. 
*/ setAccessPolicy(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); } /** * Restores a previously-deleted container. * @param options The options parameters. */ restore(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); + return this.client.sendOperationRequest({ options }, restoreOperationSpec); } /** * Renames an existing container. @@ -98,11 +77,7 @@ export class Container { * @param options The options parameters. */ rename(sourceContainerName, options) { - const operationArguments = { - sourceContainerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renameOperationSpec); + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -113,13 +88,7 @@ export class Container { * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); } /** * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given @@ -127,10 +96,7 @@ export class Container { * @param options The options parameters. 
*/ filterBlobs(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -138,10 +104,7 @@ export class Container { * @param options The options parameters. */ acquireLease(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -150,11 +113,7 @@ export class Container { * @param options The options parameters. */ releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -163,11 +122,7 @@ export class Container { * @param options The options parameters. */ renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can @@ -175,10 +130,7 @@ export class Container { * @param options The options parameters. */ breakLease(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -190,22 +142,14 @@ export class Container { * @param options The options parameters. */ changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ listBlobFlatSegment(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -216,36 +160,29 @@ export class Container { * @param options The options parameters. 
*/ listBlobHierarchySegment(delimiter, options) { - const operationArguments = { - delimiter, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true); const createOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.ContainerCreateHeaders + headersMapper: Mappers.ContainerCreateHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerCreateExceptionHeaders - } + headersMapper: Mappers.ContainerCreateExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], urlParameters: [Parameters.url], @@ -256,22 +193,22 @@ const createOperationSpec = { Parameters.metadata, Parameters.access, Parameters.defaultEncryptionScope, - Parameters.preventEncryptionScopeOverride + Parameters.preventEncryptionScopeOverride, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getPropertiesOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { - headersMapper: Mappers.ContainerGetPropertiesHeaders + headersMapper: Mappers.ContainerGetPropertiesHeaders, }, default: { bodyMapper: Mappers.StorageError, - 
headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders - } + headersMapper: Mappers.ContainerGetPropertiesExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], urlParameters: [Parameters.url], @@ -279,22 +216,22 @@ const getPropertiesOperationSpec = { Parameters.version, Parameters.requestId, Parameters.accept1, - Parameters.leaseId + Parameters.leaseId, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const deleteOperationSpec = { path: "/{containerName}", httpMethod: "DELETE", responses: { 202: { - headersMapper: Mappers.ContainerDeleteHeaders + headersMapper: Mappers.ContainerDeleteHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerDeleteExceptionHeaders - } + headersMapper: Mappers.ContainerDeleteExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.restype2], urlParameters: [Parameters.url], @@ -304,27 +241,27 @@ const deleteOperationSpec = { Parameters.accept1, Parameters.leaseId, Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince + Parameters.ifUnmodifiedSince, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setMetadataOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.ContainerSetMetadataHeaders + headersMapper: Mappers.ContainerSetMetadataHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerSetMetadataExceptionHeaders - } + headersMapper: Mappers.ContainerSetMetadataExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp6 + Parameters.comp6, ], urlParameters: [Parameters.url], headerParameters: [ @@ -333,10 +270,10 @@ const setMetadataOperationSpec = { Parameters.accept1, Parameters.metadata, Parameters.leaseId, - Parameters.ifModifiedSince + Parameters.ifModifiedSince, ], isXML: true, - serializer: xmlSerializer + 
serializer: xmlSerializer, }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -347,53 +284,53 @@ const getAccessPolicyOperationSpec = { type: { name: "Sequence", element: { - type: { name: "Composite", className: "SignedIdentifier" } - } + type: { name: "Composite", className: "SignedIdentifier" }, + }, }, serializedName: "SignedIdentifiers", xmlName: "SignedIdentifiers", xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" + xmlElementName: "SignedIdentifier", }, - headersMapper: Mappers.ContainerGetAccessPolicyHeaders + headersMapper: Mappers.ContainerGetAccessPolicyHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders - } + headersMapper: Mappers.ContainerGetAccessPolicyExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp7 + Parameters.comp7, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, Parameters.accept1, - Parameters.leaseId + Parameters.leaseId, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.ContainerSetAccessPolicyHeaders + headersMapper: Mappers.ContainerSetAccessPolicyHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders - } + headersMapper: Mappers.ContainerSetAccessPolicyExceptionHeaders, + }, }, requestBody: Parameters.containerAcl, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp7 + Parameters.comp7, ], urlParameters: [Parameters.url], headerParameters: [ @@ -404,29 +341,29 @@ const setAccessPolicyOperationSpec = { Parameters.access, Parameters.leaseId, Parameters.ifModifiedSince, - Parameters.ifUnmodifiedSince + Parameters.ifUnmodifiedSince, ], isXML: true, contentType: "application/xml; 
charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const restoreOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.ContainerRestoreHeaders + headersMapper: Mappers.ContainerRestoreHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerRestoreExceptionHeaders - } + headersMapper: Mappers.ContainerRestoreExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp8 + Parameters.comp8, ], urlParameters: [Parameters.url], headerParameters: [ @@ -434,27 +371,27 @@ const restoreOperationSpec = { Parameters.requestId, Parameters.accept1, Parameters.deletedContainerName, - Parameters.deletedContainerVersion + Parameters.deletedContainerVersion, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.ContainerRenameHeaders + headersMapper: Mappers.ContainerRenameHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerRenameExceptionHeaders - } + headersMapper: Mappers.ContainerRenameExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp9 + Parameters.comp9, ], urlParameters: [Parameters.url], headerParameters: [ @@ -462,10 +399,10 @@ const renameOperationSpec = { Parameters.requestId, Parameters.accept1, Parameters.sourceContainerName, - Parameters.sourceLeaseId + Parameters.sourceLeaseId, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const submitBatchOperationSpec = { path: "/{containerName}", @@ -474,34 +411,33 @@ const submitBatchOperationSpec = { 202: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: Mappers.ContainerSubmitBatchHeaders + 
headersMapper: Mappers.ContainerSubmitBatchHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders - } + headersMapper: Mappers.ContainerSubmitBatchExceptionHeaders, + }, }, requestBody: Parameters.body, queryParameters: [ Parameters.timeoutInSeconds, Parameters.comp4, - Parameters.restype2 + Parameters.restype2, ], urlParameters: [Parameters.url], headerParameters: [ - Parameters.contentType, Parameters.accept, Parameters.version, Parameters.requestId, Parameters.contentLength, - Parameters.multipartContentType + Parameters.multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const filterBlobsOperationSpec = { path: "/{containerName}", @@ -509,12 +445,12 @@ const filterBlobsOperationSpec = { responses: { 200: { bodyMapper: Mappers.FilterBlobSegment, - headersMapper: Mappers.ContainerFilterBlobsHeaders + headersMapper: Mappers.ContainerFilterBlobsHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders - } + headersMapper: Mappers.ContainerFilterBlobsExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, @@ -522,33 +458,33 @@ const filterBlobsOperationSpec = { Parameters.maxPageSize, Parameters.comp5, Parameters.where, - Parameters.restype2 + Parameters.restype2, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const acquireLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.ContainerAcquireLeaseHeaders + headersMapper: Mappers.ContainerAcquireLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerAcquireLeaseExceptionHeaders - } + headersMapper: 
Mappers.ContainerAcquireLeaseExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp10 + Parameters.comp10, ], urlParameters: [Parameters.url], headerParameters: [ @@ -559,27 +495,27 @@ const acquireLeaseOperationSpec = { Parameters.ifUnmodifiedSince, Parameters.action, Parameters.duration, - Parameters.proposedLeaseId + Parameters.proposedLeaseId, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const releaseLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.ContainerReleaseLeaseHeaders + headersMapper: Mappers.ContainerReleaseLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders - } + headersMapper: Mappers.ContainerReleaseLeaseExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp10 + Parameters.comp10, ], urlParameters: [Parameters.url], headerParameters: [ @@ -589,27 +525,27 @@ const releaseLeaseOperationSpec = { Parameters.ifModifiedSince, Parameters.ifUnmodifiedSince, Parameters.action1, - Parameters.leaseId1 + Parameters.leaseId1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const renewLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.ContainerRenewLeaseHeaders + headersMapper: Mappers.ContainerRenewLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders - } + headersMapper: Mappers.ContainerRenewLeaseExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp10 + Parameters.comp10, ], urlParameters: [Parameters.url], headerParameters: [ @@ -619,27 +555,27 @@ const renewLeaseOperationSpec = { Parameters.ifModifiedSince, Parameters.ifUnmodifiedSince, Parameters.leaseId1, - 
Parameters.action2 + Parameters.action2, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const breakLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 202: { - headersMapper: Mappers.ContainerBreakLeaseHeaders + headersMapper: Mappers.ContainerBreakLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders - } + headersMapper: Mappers.ContainerBreakLeaseExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp10 + Parameters.comp10, ], urlParameters: [Parameters.url], headerParameters: [ @@ -649,27 +585,27 @@ const breakLeaseOperationSpec = { Parameters.ifModifiedSince, Parameters.ifUnmodifiedSince, Parameters.action3, - Parameters.breakPeriod + Parameters.breakPeriod, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const changeLeaseOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.ContainerChangeLeaseHeaders + headersMapper: Mappers.ContainerChangeLeaseHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders - } + headersMapper: Mappers.ContainerChangeLeaseExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.restype2, - Parameters.comp10 + Parameters.comp10, ], urlParameters: [Parameters.url], headerParameters: [ @@ -680,10 +616,10 @@ const changeLeaseOperationSpec = { Parameters.ifUnmodifiedSince, Parameters.leaseId1, Parameters.action4, - Parameters.proposedLeaseId1 + Parameters.proposedLeaseId1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -691,12 +627,12 @@ const listBlobFlatSegmentOperationSpec = { responses: { 200: { bodyMapper: Mappers.ListBlobsFlatSegmentResponse, - headersMapper: 
Mappers.ContainerListBlobFlatSegmentHeaders + headersMapper: Mappers.ContainerListBlobFlatSegmentHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders - } + headersMapper: Mappers.ContainerListBlobFlatSegmentExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, @@ -705,16 +641,16 @@ const listBlobFlatSegmentOperationSpec = { Parameters.marker, Parameters.maxPageSize, Parameters.restype2, - Parameters.include1 + Parameters.include1, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -722,12 +658,12 @@ const listBlobHierarchySegmentOperationSpec = { responses: { 200: { bodyMapper: Mappers.ListBlobsHierarchySegmentResponse, - headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders + headersMapper: Mappers.ContainerListBlobHierarchySegmentHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders - } + headersMapper: Mappers.ContainerListBlobHierarchySegmentExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, @@ -737,33 +673,33 @@ const listBlobHierarchySegmentOperationSpec = { Parameters.maxPageSize, Parameters.restype2, Parameters.include1, - Parameters.delimiter + Parameters.delimiter, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getAccountInfoOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { - headersMapper: Mappers.ContainerGetAccountInfoHeaders + headersMapper: Mappers.ContainerGetAccountInfoHeaders, }, default: { bodyMapper: 
Mappers.StorageError, - headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders - } + headersMapper: Mappers.ContainerGetAccountInfoExceptionHeaders, + }, }, queryParameters: [Parameters.comp, Parameters.restype1], urlParameters: [Parameters.url], headerParameters: [Parameters.version, Parameters.accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; //# sourceMappingURL=container.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js index a014a5ad7..bd7c27a45 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/pageBlob.js @@ -5,11 +5,11 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; -/** Class representing a PageBlob. */ -export class PageBlob { +/** Class containing PageBlob operations. */ +export class PageBlobImpl { /** * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client @@ -25,12 +25,7 @@ export class PageBlob { * @param options The options parameters. 
*/ create(contentLength, blobContentLength, options) { - const operationArguments = { - contentLength, - blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec); } /** * The Upload Pages operation writes a range of pages to a page blob @@ -39,12 +34,7 @@ export class PageBlob { * @param options The options parameters. */ uploadPages(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } /** * The Clear Pages operation clears a set of pages from a page blob @@ -52,11 +42,7 @@ export class PageBlob { * @param options The options parameters. */ clearPages(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a @@ -70,14 +56,7 @@ export class PageBlob { * @param options The options parameters. 
*/ uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - const operationArguments = { - sourceUrl, - sourceRange, - contentLength, - range, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a @@ -85,10 +64,7 @@ export class PageBlob { * @param options The options parameters. */ getPageRanges(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were @@ -96,10 +72,7 @@ export class PageBlob { * @param options The options parameters. */ getPageRangesDiff(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** * Resize the Blob @@ -108,11 +81,7 @@ export class PageBlob { * @param options The options parameters. 
*/ resize(blobContentLength, options) { - const operationArguments = { - blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** * Update the sequence number of the blob @@ -122,11 +91,7 @@ export class PageBlob { * @param options The options parameters. */ updateSequenceNumber(sequenceNumberAction, options) { - const operationArguments = { - sequenceNumberAction, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. @@ -141,27 +106,22 @@ export class PageBlob { * @param options The options parameters. 
*/ copyIncremental(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true); const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.PageBlobCreateHeaders + headersMapper: Mappers.PageBlobCreateHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobCreateExceptionHeaders - } + headersMapper: Mappers.PageBlobCreateExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -194,22 +154,22 @@ const createOperationSpec = { Parameters.legalHold1, Parameters.blobType, Parameters.blobContentLength, - Parameters.blobSequenceNumber + Parameters.blobSequenceNumber, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.PageBlobUploadPagesHeaders + headersMapper: Mappers.PageBlobUploadPagesHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders - } + headersMapper: Mappers.PageBlobUploadPagesExceptionHeaders, + }, }, requestBody: Parameters.body1, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], @@ -236,22 +196,24 @@ const uploadPagesOperationSpec = { Parameters.pageWrite, Parameters.ifSequenceNumberLessThanOrEqualTo, Parameters.ifSequenceNumberLessThan, - 
Parameters.ifSequenceNumberEqualTo + Parameters.ifSequenceNumberEqualTo, ], + isXML: true, + contentType: "application/xml; charset=utf-8", mediaType: "binary", - serializer + serializer: xmlSerializer, }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.PageBlobClearPagesHeaders + headersMapper: Mappers.PageBlobClearPagesHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobClearPagesExceptionHeaders - } + headersMapper: Mappers.PageBlobClearPagesExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], urlParameters: [Parameters.url], @@ -274,22 +236,22 @@ const clearPagesOperationSpec = { Parameters.ifSequenceNumberLessThanOrEqualTo, Parameters.ifSequenceNumberLessThan, Parameters.ifSequenceNumberEqualTo, - Parameters.pageWrite1 + Parameters.pageWrite1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { - headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders + headersMapper: Mappers.PageBlobUploadPagesFromURLHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders - } + headersMapper: Mappers.PageBlobUploadPagesFromURLExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp19], urlParameters: [Parameters.url], @@ -321,10 +283,10 @@ const uploadPagesFromURLOperationSpec = { Parameters.sourceUrl, Parameters.sourceRange, Parameters.sourceContentCrc64, - Parameters.range1 + Parameters.range1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", @@ -332,19 +294,19 @@ const getPageRangesOperationSpec = { responses: { 200: { bodyMapper: Mappers.PageList, - headersMapper: 
Mappers.PageBlobGetPageRangesHeaders + headersMapper: Mappers.PageBlobGetPageRangesHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders - } + headersMapper: Mappers.PageBlobGetPageRangesExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.marker, Parameters.maxPageSize, Parameters.snapshot, - Parameters.comp20 + Parameters.comp20, ], urlParameters: [Parameters.url], headerParameters: [ @@ -357,10 +319,10 @@ const getPageRangesOperationSpec = { Parameters.range, Parameters.ifMatch, Parameters.ifNoneMatch, - Parameters.ifTags + Parameters.ifTags, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", @@ -368,12 +330,12 @@ const getPageRangesDiffOperationSpec = { responses: { 200: { bodyMapper: Mappers.PageList, - headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders + headersMapper: Mappers.PageBlobGetPageRangesDiffHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders - } + headersMapper: Mappers.PageBlobGetPageRangesDiffExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, @@ -381,7 +343,7 @@ const getPageRangesDiffOperationSpec = { Parameters.maxPageSize, Parameters.snapshot, Parameters.comp20, - Parameters.prevsnapshot + Parameters.prevsnapshot, ], urlParameters: [Parameters.url], headerParameters: [ @@ -395,22 +357,22 @@ const getPageRangesDiffOperationSpec = { Parameters.ifMatch, Parameters.ifNoneMatch, Parameters.ifTags, - Parameters.prevSnapshotUrl + Parameters.prevSnapshotUrl, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const resizeOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.PageBlobResizeHeaders + headersMapper: Mappers.PageBlobResizeHeaders, }, default: { bodyMapper: 
Mappers.StorageError, - headersMapper: Mappers.PageBlobResizeExceptionHeaders - } + headersMapper: Mappers.PageBlobResizeExceptionHeaders, + }, }, queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -428,22 +390,22 @@ const resizeOperationSpec = { Parameters.ifNoneMatch, Parameters.ifTags, Parameters.encryptionScope, - Parameters.blobContentLength + Parameters.blobContentLength, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { - headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders + headersMapper: Mappers.PageBlobUpdateSequenceNumberHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders - } + headersMapper: Mappers.PageBlobUpdateSequenceNumberExceptionHeaders, + }, }, queryParameters: [Parameters.comp, Parameters.timeoutInSeconds], urlParameters: [Parameters.url], @@ -458,22 +420,22 @@ const updateSequenceNumberOperationSpec = { Parameters.ifNoneMatch, Parameters.ifTags, Parameters.blobSequenceNumber, - Parameters.sequenceNumberAction + Parameters.sequenceNumberAction, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { - headersMapper: Mappers.PageBlobCopyIncrementalHeaders + headersMapper: Mappers.PageBlobCopyIncrementalHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders - } + headersMapper: Mappers.PageBlobCopyIncrementalExceptionHeaders, + }, }, queryParameters: [Parameters.timeoutInSeconds, Parameters.comp21], urlParameters: [Parameters.url], @@ -486,9 +448,9 @@ const copyIncrementalOperationSpec = { Parameters.ifMatch, Parameters.ifNoneMatch, Parameters.ifTags, - Parameters.copySource + 
Parameters.copySource, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; //# sourceMappingURL=pageBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js index 106bca828..dc1070069 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operations/service.js @@ -5,11 +5,11 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import * as coreHttp from "@azure/core-http"; +import * as coreClient from "@azure/core-client"; import * as Mappers from "../models/mappers"; import * as Parameters from "../models/parameters"; -/** Class representing a Service. */ -export class Service { +/** Class containing Service operations. */ +export class ServiceImpl { /** * Initialize a new instance of the class Service class. * @param client Reference to the service client @@ -24,11 +24,7 @@ export class Service { * @param options The options parameters. */ setProperties(blobServiceProperties, options) { - const operationArguments = { - blobServiceProperties, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); } /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics @@ -36,10 +32,7 @@ export class Service { * @param options The options parameters. 
*/ getProperties(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** * Retrieves statistics related to replication for the Blob service. It is only available on the @@ -48,20 +41,14 @@ export class Service { * @param options The options parameters. */ getStatistics(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); } /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ listContainersSegment(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); } /** * Retrieves a user delegation key for the Blob service. This is only a valid operation when using @@ -70,21 +57,14 @@ export class Service { * @param options The options parameters. */ getUserDelegationKey(keyInfo, options) { - const operationArguments = { - keyInfo, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. 
*/ getAccountInfo(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -95,13 +75,7 @@ export class Service { * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -110,43 +84,40 @@ export class Service { * @param options The options parameters. 
*/ filterBlobs(options) { - const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer = coreClient.createSerializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { 202: { - headersMapper: Mappers.ServiceSetPropertiesHeaders + headersMapper: Mappers.ServiceSetPropertiesHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders - } + headersMapper: Mappers.ServiceSetPropertiesExceptionHeaders, + }, }, requestBody: Parameters.blobServiceProperties, queryParameters: [ Parameters.restype, Parameters.comp, - Parameters.timeoutInSeconds + Parameters.timeoutInSeconds, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.contentType, Parameters.accept, Parameters.version, - Parameters.requestId + Parameters.requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const getPropertiesOperationSpec = { path: "/", @@ -154,26 +125,26 @@ const getPropertiesOperationSpec = { responses: { 200: { bodyMapper: Mappers.BlobServiceProperties, - headersMapper: Mappers.ServiceGetPropertiesHeaders + headersMapper: Mappers.ServiceGetPropertiesHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders - } + headersMapper: Mappers.ServiceGetPropertiesExceptionHeaders, + }, }, queryParameters: [ Parameters.restype, Parameters.comp, - Parameters.timeoutInSeconds + Parameters.timeoutInSeconds, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, 
Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getStatisticsOperationSpec = { path: "/", @@ -181,26 +152,26 @@ const getStatisticsOperationSpec = { responses: { 200: { bodyMapper: Mappers.BlobServiceStatistics, - headersMapper: Mappers.ServiceGetStatisticsHeaders + headersMapper: Mappers.ServiceGetStatisticsHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders - } + headersMapper: Mappers.ServiceGetStatisticsExceptionHeaders, + }, }, queryParameters: [ Parameters.restype, Parameters.timeoutInSeconds, - Parameters.comp1 + Parameters.comp1, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const listContainersSegmentOperationSpec = { path: "/", @@ -208,12 +179,12 @@ const listContainersSegmentOperationSpec = { responses: { 200: { bodyMapper: Mappers.ListContainersSegmentResponse, - headersMapper: Mappers.ServiceListContainersSegmentHeaders + headersMapper: Mappers.ServiceListContainersSegmentHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders - } + headersMapper: Mappers.ServiceListContainersSegmentExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, @@ -221,16 +192,16 @@ const listContainersSegmentOperationSpec = { Parameters.prefix, Parameters.marker, Parameters.maxPageSize, - Parameters.include + Parameters.include, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -238,48 +209,48 @@ const getUserDelegationKeyOperationSpec 
= { responses: { 200: { bodyMapper: Mappers.UserDelegationKey, - headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders + headersMapper: Mappers.ServiceGetUserDelegationKeyHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders - } + headersMapper: Mappers.ServiceGetUserDelegationKeyExceptionHeaders, + }, }, requestBody: Parameters.keyInfo, queryParameters: [ Parameters.restype, Parameters.timeoutInSeconds, - Parameters.comp3 + Parameters.comp3, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.contentType, Parameters.accept, Parameters.version, - Parameters.requestId + Parameters.requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const getAccountInfoOperationSpec = { path: "/", httpMethod: "GET", responses: { 200: { - headersMapper: Mappers.ServiceGetAccountInfoHeaders + headersMapper: Mappers.ServiceGetAccountInfoHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders - } + headersMapper: Mappers.ServiceGetAccountInfoExceptionHeaders, + }, }, queryParameters: [Parameters.comp, Parameters.restype1], urlParameters: [Parameters.url], headerParameters: [Parameters.version, Parameters.accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; const submitBatchOperationSpec = { path: "/", @@ -288,30 +259,29 @@ const submitBatchOperationSpec = { 202: { bodyMapper: { type: { name: "Stream" }, - serializedName: "parsedResponse" + serializedName: "parsedResponse", }, - headersMapper: Mappers.ServiceSubmitBatchHeaders + headersMapper: Mappers.ServiceSubmitBatchHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders - } + headersMapper: Mappers.ServiceSubmitBatchExceptionHeaders, + }, }, requestBody: Parameters.body, queryParameters: 
[Parameters.timeoutInSeconds, Parameters.comp4], urlParameters: [Parameters.url], headerParameters: [ - Parameters.contentType, Parameters.accept, Parameters.version, Parameters.requestId, Parameters.contentLength, - Parameters.multipartContentType + Parameters.multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer, }; const filterBlobsOperationSpec = { path: "/", @@ -319,27 +289,27 @@ const filterBlobsOperationSpec = { responses: { 200: { bodyMapper: Mappers.FilterBlobSegment, - headersMapper: Mappers.ServiceFilterBlobsHeaders + headersMapper: Mappers.ServiceFilterBlobsHeaders, }, default: { bodyMapper: Mappers.StorageError, - headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders - } + headersMapper: Mappers.ServiceFilterBlobsExceptionHeaders, + }, }, queryParameters: [ Parameters.timeoutInSeconds, Parameters.marker, Parameters.maxPageSize, Parameters.comp5, - Parameters.where + Parameters.where, ], urlParameters: [Parameters.url], headerParameters: [ Parameters.version, Parameters.requestId, - Parameters.accept1 + Parameters.accept1, ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer, }; //# sourceMappingURL=service.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/appendBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/appendBlob.js new file mode 100644 index 000000000..1507c6e6a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/appendBlob.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export {}; +//# sourceMappingURL=appendBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blob.js new file mode 100644 index 000000000..f4a13f092 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blob.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export {}; +//# sourceMappingURL=blob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blockBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blockBlob.js new file mode 100644 index 000000000..98b4c29b6 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/blockBlob.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export {}; +//# sourceMappingURL=blockBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/container.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/container.js new file mode 100644 index 000000000..deadab787 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/container.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export {}; +//# sourceMappingURL=container.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/index.js new file mode 100644 index 000000000..eca268792 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/index.js @@ -0,0 +1,14 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export * from "./service"; +export * from "./container"; +export * from "./blob"; +export * from "./pageBlob"; +export * from "./appendBlob"; +export * from "./blockBlob"; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/pageBlob.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/pageBlob.js new file mode 100644 index 000000000..bc8c94f14 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/pageBlob.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +export {}; +//# sourceMappingURL=pageBlob.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/service.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/service.js new file mode 100644 index 000000000..c62ec5d37 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/operationsInterfaces/service.js @@ -0,0 +1,9 @@ +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +export {}; +//# sourceMappingURL=service.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js index dfcaa045c..1e2738fb7 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClient.js @@ -5,9 +5,9 @@ * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -import { Service, Container, Blob, PageBlob, AppendBlob, BlockBlob } from "./operations"; -import { StorageClientContext } from "./storageClientContext"; -export class StorageClient extends StorageClientContext { +import * as coreHttpCompat from "@azure/core-http-compat"; +import { ServiceImpl, ContainerImpl, BlobImpl, PageBlobImpl, AppendBlobImpl, BlockBlobImpl, } from "./operations"; +export class StorageClient extends coreHttpCompat.ExtendedServiceClient { /** * Initializes a new instance of the StorageClient class. 
* @param url The URL of the service account, container, or blob that is the target of the desired @@ -15,13 +15,35 @@ export class StorageClient extends StorageClientContext { * @param options The parameter options */ constructor(url, options) { - super(url, options); - this.service = new Service(this); - this.container = new Container(this); - this.blob = new Blob(this); - this.pageBlob = new PageBlob(this); - this.appendBlob = new AppendBlob(this); - this.blockBlob = new BlockBlob(this); + var _a, _b; + if (url === undefined) { + throw new Error("'url' cannot be null"); + } + // Initializing default values for options + if (!options) { + options = {}; + } + const defaults = { + requestContentType: "application/json; charset=utf-8", + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.23.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix + ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { + userAgentPrefix, + }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? 
_b : "{url}" }); + super(optionsWithDefaults); + // Parameter assignments + this.url = url; + // Assigning values to Constant parameters + this.version = options.version || "2023-11-03"; + this.service = new ServiceImpl(this); + this.container = new ContainerImpl(this); + this.blob = new BlobImpl(this); + this.pageBlob = new PageBlobImpl(this); + this.appendBlob = new AppendBlobImpl(this); + this.blockBlob = new BlockBlobImpl(this); } } //# sourceMappingURL=storageClient.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js deleted file mode 100644 index d9c7793fa..000000000 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/generated/src/storageClientContext.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -import * as coreHttp from "@azure/core-http"; -const packageName = "azure-storage-blob"; -const packageVersion = "12.17.0"; -export class StorageClientContext extends coreHttp.ServiceClient { - /** - * Initializes a new instance of the StorageClientContext class. - * @param url The URL of the service account, container, or blob that is the target of the desired - * operation. 
- * @param options The parameter options - */ - constructor(url, options) { - if (url === undefined) { - throw new Error("'url' cannot be null"); - } - // Initializing default values for options - if (!options) { - options = {}; - } - if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; - } - super(undefined, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; - // Parameter assignments - this.url = url; - // Assigning values to Constant parameters - this.version = options.version || "2023-11-03"; - } -} -//# sourceMappingURL=storageClientContext.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js index 93dc89c13..49e48cd40 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.browser.js @@ -1,6 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { RestError } from "@azure/core-http"; +import { RestError } from "@azure/core-rest-pipeline"; export * from "./BlobServiceClient"; export * from "./Clients"; export * from "./ContainerClient"; @@ -12,7 +12,8 @@ export * from "./StorageBrowserPolicyFactory"; export * from "./credentials/AnonymousCredential"; export * from "./credentials/Credential"; export { BlockBlobTier, PremiumPageBlobTier, } from "./models"; -export * from "./Pipeline"; +export { Pipeline, isPipelineLike, newPipeline, StorageOAuthScopes, } from "./Pipeline"; +export { BaseRequestPolicy } from "./policies/RequestPolicy"; export * from "./policies/AnonymousCredentialPolicy"; export * from "./policies/CredentialPolicy"; export * from "./StorageRetryPolicyFactory"; diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js index 36c63bb11..76878f8a9 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/index.js @@ -1,6 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { RestError } from "@azure/core-http"; +import { RestError } from "@azure/core-rest-pipeline"; export * from "./BlobServiceClient"; export * from "./Clients"; export * from "./ContainerClient"; @@ -20,7 +20,8 @@ export * from "./credentials/AnonymousCredential"; export * from "./credentials/Credential"; export * from "./credentials/StorageSharedKeyCredential"; export { BlockBlobTier, PremiumPageBlobTier, StorageBlobAudience, getBlobServiceAccountAudience, } from "./models"; -export * from "./Pipeline"; +export { Pipeline, isPipelineLike, newPipeline, StorageOAuthScopes, } from "./Pipeline"; +export { BaseRequestPolicy } from "./policies/RequestPolicy"; export * from "./policies/AnonymousCredentialPolicy"; export * from "./policies/CredentialPolicy"; export * from "./StorageRetryPolicyFactory"; diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js index f4d40170c..ea73b9e0b 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/models.js @@ -105,6 +105,10 @@ export var StorageBlobAudience; */ StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; })(StorageBlobAudience || (StorageBlobAudience = {})); +/** + * + * To get OAuth audience for a storage account for blob service. 
+ */ export function getBlobServiceAccountAudience(storageAccountName) { return `https://${storageAccountName}.blob.core.windows.net/.default`; } diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js index a2f1fccc7..3182e092f 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/CredentialPolicy.js @@ -1,6 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { BaseRequestPolicy } from "@azure/core-http"; +import { BaseRequestPolicy } from "./RequestPolicy"; /** * Credential policy used to sign HTTP(S) requests before sending. This is an * abstract class. diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/RequestPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/RequestPolicy.js new file mode 100644 index 000000000..a6c8fcd5a --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/RequestPolicy.js @@ -0,0 +1,40 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The base class from which all request policies derive. + */ +export class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; + } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. 
+ * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); + } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); + } +} +//# sourceMappingURL=RequestPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js deleted file mode 100644 index a272017ba..000000000 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBearerTokenChallengeAuthenticationPolicy.js +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { URLBuilder } from "@azure/core-http"; -import { BaseRequestPolicy, } from "@azure/core-http"; -import { delay } from "@azure/core-http"; -/** - * A set of constants used internally when processing requests. - */ -const Constants = { - DefaultScope: "/.default", - /** - * Defines constants for use with HTTP headers. - */ - HeaderConstants: { - /** - * The Authorization header. - */ - AUTHORIZATION: "authorization", - }, -}; -// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry -}; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. 
- * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. 
- * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). - */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - }, - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. 
- refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -/** - * We will retrieve the challenge only if the response status code was 401, - * and if the response contained the header "WWW-Authenticate" with a non-empty value. - */ -function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; - } - return; -} -/** - * Converts: `Bearer a="b" c="d"`. - * Into: `[ { a: 'b', c: 'd' }]`. 
- * - * @internal - */ -function parseChallenge(challenge) { - const bearerChallenge = challenge.slice("Bearer ".length); - const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); - // Key-value pairs to plain object: - return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -export function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - let getToken = createTokenCycler(credential, scopes); - class StorageBearerTokenChallengeAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const getTokenInternal = getToken; - const token = (await getTokenInternal({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - })).token; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - const response = await this._nextPolicy.sendRequest(webResource); - if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { - const challenge = getChallenge(response); - if (challenge) { - const challengeInfo = parseChallenge(challenge); - const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; - const parsedAuthUri = URLBuilder.parse(challengeInfo.authorization_uri); - const pathSegments = parsedAuthUri.getPath().split("/"); - const tenantId = pathSegments[1]; - const getTokenForChallenge = createTokenCycler(credential, challengeScopes); - const tokenForChallenge = (await getTokenForChallenge({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - tenantId: tenantId, - })).token; - getToken = getTokenForChallenge; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return response; - } - } - return { - create: (nextPolicy, options) => { - return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); - }, - }; -} -//# sourceMappingURL=StorageBearerTokenChallengeAuthenticationPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js index ad35d504c..dc573c768 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicy.js @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { BaseRequestPolicy, isNode, } from "@azure/core-http"; +import { BaseRequestPolicy } from "./RequestPolicy"; +import { isNode } from "@azure/core-util"; import { HeaderConstants, URLConstants } from "../utils/constants"; import { setURLParameter } from "../utils/utils.common"; /** diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicyV2.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicyV2.js new file mode 100644 index 000000000..c4cddc1af --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageBrowserPolicyV2.js @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { isNode } from "@azure/core-util"; +import { HeaderConstants, URLConstants } from "../utils/constants"; +import { setURLParameter } from "../utils/utils.common"; +/** + * The programmatic identifier of the StorageBrowserPolicy. + */ +export const storageBrowserPolicyName = "storageBrowserPolicy"; +/** + * storageBrowserPolicy is a policy used to prevent browsers from caching requests + * and to remove cookies and explicit content-length headers. 
+ */ +export function storageBrowserPolicy() { + return { + name: storageBrowserPolicyName, + async sendRequest(request, next) { + if (isNode) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.delete(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.delete(HeaderConstants.CONTENT_LENGTH); + return next(request); + }, + }; +} +//# sourceMappingURL=StorageBrowserPolicyV2.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js index 040037381..04c5b58d6 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicy.js @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. import { AbortError } from "@azure/abort-controller"; -import { BaseRequestPolicy, } from "@azure/core-http"; +import { BaseRequestPolicy } from "./RequestPolicy"; import { URLConstants } from "../utils/constants"; import { delay, setURLHost, setURLParameter } from "../utils/utils.common"; import { logger } from "../log"; diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicyV2.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicyV2.js new file mode 100644 index 000000000..f719645d7 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageRetryPolicyV2.js @@ -0,0 +1,165 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+import { AbortError } from "@azure/abort-controller"; +import { isRestError, RestError, } from "@azure/core-rest-pipeline"; +import { getErrorMessage } from "@azure/core-util"; +import { URLConstants } from "../utils/constants"; +import { delay, setURLHost, setURLParameter } from "../utils/utils.common"; +import { logger } from "../log"; +/** + * Name of the {@link storageRetryPolicy} + */ +export const storageRetryPolicyName = "storageRetryPolicy"; +/** + * RetryPolicy types. + */ +export var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. + */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy +}; +const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", +]; +const RETRY_ABORT_ERROR = new AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +export function storageRetryPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f; + const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? 
_c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { + var _a, _b; + if (attempt >= maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error) { + for (const retriableError of retriableErrors) { + if (error.name.toUpperCase().includes(retriableError) || + error.message.toUpperCase().includes(retriableError) || + (error.code && error.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; + } + } + if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && + (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || error) { + const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? 
_b : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: storageRetryPolicyName, + async sendRequest(request, next) { + // Set the server-side timeout query parameter "timeout=[seconds]" + if (tryTimeoutInMs) { + request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || + !secondaryUrl || + !["GET", "HEAD", "OPTIONS"].includes(request.method) || + attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = undefined; + error = undefined; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (e) { + if (isRestError(e)) { + logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error = e; + } + else { + logger.error(`RetryPolicy: Caught error, message: ${getErrorMessage(e)}`); + throw e; + } + } + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); + if (retryAgain) { + await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); + } + attempt++; + } + if (response) { + return response; + } + throw error !== null && error !== void 0 ? error : new RestError("RetryPolicy failed without known error."); + }, + }; +} +//# sourceMappingURL=StorageRetryPolicyV2.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.browser.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.browser.js new file mode 100644 index 000000000..cfcfda41d --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.browser.js @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the storageSharedKeyCredentialPolicy. + */ +export const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; +/** + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. 
+ */ +export function storageSharedKeyCredentialPolicy(_options) { + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + return next(request); + }, + }; +} +//# sourceMappingURL=StorageSharedKeyCredentialPolicyV2.browser.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.js new file mode 100644 index 000000000..57417cd46 --- /dev/null +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.js @@ -0,0 +1,131 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +import { createHmac } from "crypto"; +import { HeaderConstants } from "../utils/constants"; +import { getURLPath, getURLQueries } from "../utils/utils.common"; +/** + * The programmatic identifier of the storageSharedKeyCredentialPolicy. + */ +export const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; +/** + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. 
+ */ +export function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, HeaderConstants.DATE), + getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + getCanonicalizedHeadersString(request) + + getCanonicalizedResourceString(request); + const signature = createHmac("sha256", options.accountKey) + .update(stringToSign, "utf8") + .digest("base64"); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + } + /** + * Retrieve header value according to shared key sign rules. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + */ + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + */ + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } + } + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); + } + } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + }, + }; +} +//# sourceMappingURL=StorageSharedKeyCredentialPolicyV2.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js 
b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js deleted file mode 100644 index f5b173bbb..000000000 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/policies/TelemetryPolicy.js +++ /dev/null @@ -1,36 +0,0 @@ -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -import { BaseRequestPolicy, HttpHeaders, isNode, } from "@azure/core-http"; -import { HeaderConstants } from "../utils/constants"; -/** - * TelemetryPolicy is a policy used to tag user-agent header for every requests. - */ -export class TelemetryPolicy extends BaseRequestPolicy { - /** - * Creates an instance of TelemetryPolicy. - * @param nextPolicy - - * @param options - - * @param telemetry - - */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; - } - /** - * Sends out request. - * - * @param request - - */ - async sendRequest(request) { - if (isNode) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); - } - } - return this._nextPolicy.sendRequest(request); - } -} -//# sourceMappingURL=TelemetryPolicy.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js index 520342455..ebf174c01 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/pollers/BlobStartCopyFromUrlPoller.js @@ -1,6 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { delay } from "@azure/core-http"; +import { delay } from "@azure/core-util"; import { Poller } from "@azure/core-lro"; /** * This is the poller returned by {@link BlobClient.beginCopyFromURL}. diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js index 141073868..0303000c4 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/AccountSASPermissions.js @@ -20,7 +20,7 @@ export class AccountSASPermissions { */ this.write = false; /** - * Permission to create blobs and files granted. + * Permission to delete blobs and files granted. */ this.delete = false; /** diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js index f7d47f9e2..51c34527e 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/BlobSASSignatureValues.js @@ -393,7 +393,7 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, + undefined, // agentObjectId blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", @@ -472,7 +472,7 @@ function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userD userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, + undefined, // agentObjectId blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js index c3869d63d..7c089a08b 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/sas/SASQueryParameters.js @@ -26,6 +26,20 @@ export var SASProtocol; * NOTE: Instances of this class are immutable. */ export class SASQueryParameters { + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; @@ -85,20 +99,6 @@ export class SASQueryParameters { } } } - /** - * Optional. IP range allowed for this SAS. 
- * - * @readonly - */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start, - }; - } - return undefined; - } /** * Encodes all SAS query parameters into a string that can be appended to a URL. * @@ -114,12 +114,12 @@ export class SASQueryParameters { "sip", "si", "ses", - "skoid", - "sktid", - "skt", - "ske", - "sks", - "skv", + "skoid", // Signed object ID + "sktid", // Signed tenant ID + "skt", // Signed key start time + "ske", // Signed key expiry time + "sks", // Signed key service + "skv", // Signed key version "sr", "sp", "sig", diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js index 2d58e9d23..1c5679ffe 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/RetriableReadableStream.js @@ -1,5 +1,6 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +import { AbortError } from "@azure/abort-controller"; import { Readable } from "stream"; /** * ONLY AVAILABLE IN NODE.JS RUNTIME. 
@@ -24,8 +25,8 @@ export class RetriableReadableStream extends Readable { if (this.options.doInjectErrorOnce) { this.options.doInjectErrorOnce = undefined; this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); + this.sourceErrorOrEndHandler(); + this.source.destroy(); return; } // console.log( @@ -39,6 +40,10 @@ export class RetriableReadableStream extends Readable { this.source.pause(); } }; + this.sourceAbortedHandler = () => { + const abortError = new AbortError("The operation was aborted."); + this.destroy(abortError); + }; this.sourceErrorOrEndHandler = (err) => { if (err && err.name === "AbortError") { this.destroy(err); @@ -95,11 +100,14 @@ export class RetriableReadableStream extends Readable { this.source.on("data", this.sourceDataHandler); this.source.on("end", this.sourceErrorOrEndHandler); this.source.on("error", this.sourceErrorOrEndHandler); + // needed for Node14 + this.source.on("aborted", this.sourceAbortedHandler); } removeSourceEventHandlers() { this.source.removeListener("data", this.sourceDataHandler); this.source.removeListener("end", this.sourceErrorOrEndHandler); this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); } _destroy(error, callback) { // remove listener from source and release source diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js index 131ba8727..de9fbdae7 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/cache.js @@ -1,8 +1,11 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-import { DefaultHttpClient } from "@azure/core-http"; -const _defaultHttpClient = new DefaultHttpClient(); +import { createDefaultHttpClient } from "@azure/core-rest-pipeline"; +let _defaultHttpClient; export function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = createDefaultHttpClient(); + } return _defaultHttpClient; } //# sourceMappingURL=cache.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js index bbb62f983..599ec6e3d 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/constants.js @@ -1,7 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -export const SDK_VERSION = "12.17.0"; -export const SERVICE_VERSION = "2023-11-03"; +export const SDK_VERSION = "12.23.0"; +export const SERVICE_VERSION = "2024-05-04"; export const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB export const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB export const BLOCK_BLOB_MAX_BLOCKS = 50000; diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js index 66a4527e3..060f1ec35 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/tracing.js @@ -1,27 +1,14 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { createSpanFunction } from "@azure/core-tracing"; +import { createTracingClient } from "@azure/core-tracing"; +import { SDK_VERSION } from "./constants"; /** * Creates a span using the global tracer. 
* @internal */ -export const createSpan = createSpanFunction({ - packagePrefix: "Azure.Storage.Blob", +export const tracingClient = createTracingClient({ + packageName: "@azure/storage-blob", + packageVersion: SDK_VERSION, namespace: "Microsoft.Storage", }); -/** - * @internal - * - * Adapt the tracing options from OperationOptions to what they need to be for - * RequestOptionsBase (when we update to later OpenTelemetry versions this is now - * two separate fields, not just one). - */ -export function convertTracingToRequestOptionsBase(options) { - var _a, _b; - return { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, - }; -} //# sourceMappingURL=tracing.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js index 7faa0b5c6..80ec28e1e 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-blob/src/utils/utils.common.js @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -import { HttpHeaders, isNode, URLBuilder } from "@azure/core-http"; +import { createHttpHeaders } from "@azure/core-rest-pipeline"; +import { isNode } from "@azure/core-util"; import { DevelopmentConnectionString, HeaderConstants, PathStylePorts, URLConstants, } from "./constants"; /** * Reserved URL characters must be properly escaped for Storage services like Blob or File. 
@@ -55,11 +56,11 @@ import { DevelopmentConnectionString, HeaderConstants, PathStylePorts, URLConsta * @param url - */ export function escapeURLPath(url) { - const urlParsed = URLBuilder.parse(url); - let path = urlParsed.getPath(); + const urlParsed = new URL(url); + let path = urlParsed.pathname; path = path || "/"; path = escape(path); - urlParsed.setPath(path); + urlParsed.pathname = path; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { @@ -144,7 +145,7 @@ export function extractConnectionStringParts(connectionString) { } else { // SAS connection string - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); let accountName = getValueInConnString(connectionString, "AccountName"); // if accountName is empty, try to read it from BlobEndpoint if (!accountName) { @@ -156,6 +157,10 @@ export function extractConnectionStringParts(connectionString) { else if (!accountSas) { throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); } + // client constructors assume accountSas does *not* start with ? + if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); + } return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } } @@ -180,12 +185,11 @@ function escape(text) { * @returns An updated URL string */ export function appendToURLPath(url, name) { - const urlParsed = URLBuilder.parse(url); - let path = urlParsed.getPath(); + const urlParsed = new URL(url); + let path = urlParsed.pathname; path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; - urlParsed.setPath(path); - const normalizedUrl = new URL(urlParsed.toString()); - return normalizedUrl.toString(); + urlParsed.pathname = path; + return urlParsed.toString(); } /** * Set URL parameter name and value. 
If name exists in URL parameters, old value @@ -197,8 +201,24 @@ export function appendToURLPath(url, name) { * @returns An updated URL string */ export function setURLParameter(url, name, value) { - const urlParsed = URLBuilder.parse(url); - urlParsed.setQueryParameter(name, value); + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : undefined; + // mutating searchParams will change the encoding, so we have to do this ourselves + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); + } + } + } + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); + } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; return urlParsed.toString(); } /** @@ -208,8 +228,9 @@ export function setURLParameter(url, name, value) { * @param name - */ export function getURLParameter(url, name) { - const urlParsed = URLBuilder.parse(url); - return urlParsed.getQueryParameterValue(name); + var _a; + const urlParsed = new URL(url); + return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; } /** * Set URL host. 
@@ -219,8 +240,8 @@ export function getURLParameter(url, name) { * @returns An updated URL string */ export function setURLHost(url, host) { - const urlParsed = URLBuilder.parse(url); - urlParsed.setHost(host); + const urlParsed = new URL(url); + urlParsed.hostname = host; return urlParsed.toString(); } /** @@ -229,8 +250,13 @@ export function setURLHost(url, host) { * @param url - Source URL string */ export function getURLPath(url) { - const urlParsed = URLBuilder.parse(url); - return urlParsed.getPath(); + try { + const urlParsed = new URL(url); + return urlParsed.pathname; + } + catch (e) { + return undefined; + } } /** * Get URL scheme from an URL string. @@ -238,8 +264,13 @@ export function getURLPath(url) { * @param url - Source URL string */ export function getURLScheme(url) { - const urlParsed = URLBuilder.parse(url); - return urlParsed.getScheme(); + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } + catch (e) { + return undefined; + } } /** * Get URL path and query from an URL string. @@ -247,12 +278,12 @@ export function getURLScheme(url) { * @param url - Source URL string */ export function getURLPathAndQuery(url) { - const urlParsed = URLBuilder.parse(url); - const pathString = urlParsed.getPath(); + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; if (!pathString) { throw new RangeError("Invalid url without valid path."); } - let queryString = urlParsed.getQuery() || ""; + let queryString = urlParsed.search || ""; queryString = queryString.trim(); if (queryString !== "") { queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' 
@@ -265,12 +296,12 @@ export function getURLPathAndQuery(url) { * @param url - */ export function getURLQueries(url) { - let queryString = URLBuilder.parse(url).getQuery(); + let queryString = new URL(url).search; if (!queryString) { return {}; } queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; let querySubStrings = queryString.split("&"); querySubStrings = querySubStrings.filter((value) => { const indexOfEqual = value.indexOf("="); @@ -294,15 +325,15 @@ export function getURLQueries(url) { * @returns An updated URL string. */ export function appendToURLQuery(url, queryParts) { - const urlParsed = URLBuilder.parse(url); - let query = urlParsed.getQuery(); + const urlParsed = new URL(url); + let query = urlParsed.search; if (query) { query += "&" + queryParts; } else { query = queryParts; } - urlParsed.setQuery(query); + urlParsed.search = query; return urlParsed.toString(); } /** @@ -415,16 +446,16 @@ export function sanitizeURL(url) { return safeURL; } export function sanitizeHeaders(originalHeader) { - const headers = new HttpHeaders(); - for (const header of originalHeader.headersArray()) { - if (header.name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) { - headers.set(header.name, "*****"); + const headers = createHttpHeaders(); + for (const [name, value] of originalHeader) { + if (name.toLowerCase() === HeaderConstants.AUTHORIZATION.toLowerCase()) { + headers.set(name, "*****"); } - else if (header.name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) { - headers.set(header.name, sanitizeURL(header.value)); + else if (name.toLowerCase() === HeaderConstants.X_MS_COPY_SOURCE) { + headers.set(name, sanitizeURL(value)); } else { - headers.set(header.name, header.value); + headers.set(name, value); } } return headers; @@ -444,18 +475,18 @@ export function iEqual(str1, str2) { * @returns with 
the account name */ export function getAccountNameFromUrl(url) { - const parsedUrl = URLBuilder.parse(url); + const parsedUrl = new URL(url); let accountName; try { - if (parsedUrl.getHost().split(".")[1] === "blob") { + if (parsedUrl.hostname.split(".")[1] === "blob") { // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - accountName = parsedUrl.getHost().split(".")[0]; + accountName = parsedUrl.hostname.split(".")[0]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ // .getPath() -> /devstoreaccount1/ - accountName = parsedUrl.getPath().split("/")[1]; + accountName = parsedUrl.pathname.split("/")[1]; } else { // Custom domain case: "https://customdomain.com/containername/blob". @@ -468,16 +499,13 @@ export function getAccountNameFromUrl(url) { } } export function isIpEndpointStyle(parsedUrl) { - if (parsedUrl.getHost() === undefined) { - return false; - } - const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? "" : ":" + parsedUrl.getPort()); + const host = parsedUrl.host; // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || - (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); + (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); } /** * Convert Tags to encoded string. 
@@ -713,4 +741,16 @@ export function EscapePath(blobName) { } return split.join("/"); } +/** + * A typesafe helper for ensuring that a given response object has + * the original _response attached. + * @param response - A response object from calling a client operation + * @returns The same object, but with known _response property + */ +export function assertResponse(response) { + if (`_response` in response) { + return response; + } + throw new TypeError(`Unexpected response object ${response}`); +} //# sourceMappingURL=utils.common.js.map \ No newline at end of file diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js index b15390541..32cf8f50e 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-common/src/PooledBuffer.js @@ -4,8 +4,8 @@ import { BuffersStream } from "./BuffersStream"; /** * maxBufferLength is max size of each buffer in the pooled buffers. */ -// Can't use import as Typescript doesn't recognize "buffer". -const maxBufferLength = require("buffer").constants.MAX_LENGTH; +import buffer from "buffer"; +const maxBufferLength = buffer.constants.MAX_LENGTH; /** * This class provides a buffer container which conceptually has no hard size limit. * It accepts a capacity, an array of input buffers and the total length of input data. @@ -15,6 +15,12 @@ const maxBufferLength = require("buffer").constants.MAX_LENGTH; * assembled from all the data in the internal "buffer". */ export class PooledBuffer { + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } constructor(capacity, buffers, totalLength) { /** * Internal buffers used to keep the data. @@ -36,12 +42,6 @@ export class PooledBuffer { this.fill(buffers, totalLength); } } - /** - * The size of the data contained in the pooled buffers. 
- */ - get size() { - return this._size; - } /** * Fill the internal buffers with data in the input buffers serially * with respect to the total length and the total capacity of the internal buffers. diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js index 197fbd3fe..03798af14 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReadableFromStream.js @@ -4,17 +4,17 @@ import { AvroReadable } from "./AvroReadable"; import { AbortError } from "@azure/abort-controller"; const ABORT_ERROR = new AbortError("Reading from the avro stream was aborted."); export class AvroReadableFromStream extends AvroReadable { - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; - } toUint8Array(data) { if (typeof data === "string") { return Buffer.from(data); } return data; } + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } get position() { return this._position; } diff --git a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js index 842137546..3f3fd08eb 100644 --- a/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js +++ b/node_modules/@azure/storage-blob/dist-esm/storage-internal-avro/src/AvroReader.js @@ -3,11 +3,16 @@ import { __asyncGenerator, __await } from "tslib"; // TODO: Do a review of non-interfaces /* eslint-disable @azure/azure-sdk/ts-use-interface-parameters */ -import "@azure/core-paging"; import { AVRO_CODEC_KEY, AVRO_INIT_BYTES, AVRO_SCHEMA_KEY, AVRO_SYNC_MARKER_SIZE, } from "./AvroConstants"; import { AvroParser, AvroType } from "./AvroParser"; import { arraysEqual } from 
"./utils/utils.common"; export class AvroReader { + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { this._dataStream = dataStream; this._headerStream = headerStream || dataStream; @@ -16,12 +21,6 @@ export class AvroReader { this._objectIndex = indexWithinCurrentBlock || 0; this._initialBlockOffset = currentBlockOffset || 0; } - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; - } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { abortSignal: options.abortSignal, @@ -65,8 +64,8 @@ export class AvroReader { hasNext() { return !this._initialized || this._itemsRemainingInBlock > 0; } - parseObjects(options = {}) { - return __asyncGenerator(this, arguments, function* parseObjects_1() { + parseObjects() { + return __asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { if (!this._initialized) { yield __await(this.initialize(options)); } diff --git a/node_modules/@azure/storage-blob/dist/index.js b/node_modules/@azure/storage-blob/dist/index.js index 8f7b8786c..aebd9ef2c 100644 --- a/node_modules/@azure/storage-blob/dist/index.js +++ b/node_modules/@azure/storage-blob/dist/index.js @@ -2,22 +2,25 @@ Object.defineProperty(exports, '__esModule', { value: true }); -var coreHttp = require('@azure/core-http'); +var coreRestPipeline = require('@azure/core-rest-pipeline'); var tslib = require('tslib'); -var coreTracing = require('@azure/core-tracing'); +var coreAuth = require('@azure/core-auth'); +var coreUtil = require('@azure/core-util'); +var coreHttpCompat = require('@azure/core-http-compat'); +var coreClient = require('@azure/core-client'); +var coreXml = require('@azure/core-xml'); var logger$1 = require('@azure/logger'); var abortController = require('@azure/abort-controller'); -var os 
= require('os'); var crypto = require('crypto'); +var coreTracing = require('@azure/core-tracing'); var stream = require('stream'); -require('@azure/core-paging'); var coreLro = require('@azure/core-lro'); var events = require('events'); var fs = require('fs'); var util = require('util'); +var buffer = require('buffer'); -function _interopNamespace(e) { - if (e && e.__esModule) return e; +function _interopNamespaceDefault(e) { var n = Object.create(null); if (e) { Object.keys(e).forEach(function (k) { @@ -30,3958 +33,3265 @@ function _interopNamespace(e) { } }); } - n["default"] = e; + n.default = e; return Object.freeze(n); } -var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); -var os__namespace = /*#__PURE__*/_interopNamespace(os); -var fs__namespace = /*#__PURE__*/_interopNamespace(fs); -var util__namespace = /*#__PURE__*/_interopNamespace(util); +var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat); +var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient); +var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs); +var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util); -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The `@azure/logger` configuration for this package. 
*/ -const BlobServiceProperties = { - serializedName: "BlobServiceProperties", - xmlName: "StorageServiceProperties", - type: { - name: "Composite", - className: "BlobServiceProperties", - modelProperties: { - blobAnalyticsLogging: { - serializedName: "Logging", - xmlName: "Logging", - type: { - name: "Composite", - className: "Logging" - } - }, - hourMetrics: { - serializedName: "HourMetrics", - xmlName: "HourMetrics", - type: { - name: "Composite", - className: "Metrics" - } - }, - minuteMetrics: { - serializedName: "MinuteMetrics", - xmlName: "MinuteMetrics", - type: { - name: "Composite", - className: "Metrics" - } - }, - cors: { - serializedName: "Cors", - xmlName: "Cors", - xmlIsWrapped: true, - xmlElementName: "CorsRule", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "CorsRule" - } - } - } - }, - defaultServiceVersion: { - serializedName: "DefaultServiceVersion", - xmlName: "DefaultServiceVersion", - type: { - name: "String" - } - }, - deleteRetentionPolicy: { - serializedName: "DeleteRetentionPolicy", - xmlName: "DeleteRetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } - }, - staticWebsite: { - serializedName: "StaticWebsite", - xmlName: "StaticWebsite", - type: { - name: "Composite", - className: "StaticWebsite" - } - } - } +const logger = logger$1.createClientLogger("storage-blob"); + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The base class from which all request policies derive. + */ +class BaseRequestPolicy { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. 
+ */ + _options) { + this._nextPolicy = _nextPolicy; + this._options = _options; } -}; -const Logging = { - serializedName: "Logging", - type: { - name: "Composite", - className: "Logging", - modelProperties: { - version: { - serializedName: "Version", - required: true, - xmlName: "Version", - type: { - name: "String" - } - }, - deleteProperty: { - serializedName: "Delete", - required: true, - xmlName: "Delete", - type: { - name: "Boolean" - } - }, - read: { - serializedName: "Read", - required: true, - xmlName: "Read", - type: { - name: "Boolean" - } - }, - write: { - serializedName: "Write", - required: true, - xmlName: "Write", - type: { - name: "Boolean" - } - }, - retentionPolicy: { - serializedName: "RetentionPolicy", - xmlName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } - } - } + /** + * Get whether or not a log with the provided log level should be logged. + * @param logLevel - The log level of the log that will be logged. + * @returns Whether or not a log with the provided log level should be logged. + */ + shouldLog(logLevel) { + return this._options.shouldLog(logLevel); } -}; -const RetentionPolicy = { - serializedName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy", - modelProperties: { - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - days: { - constraints: { - InclusiveMinimum: 1 - }, - serializedName: "Days", - xmlName: "Days", - type: { - name: "Number" - } - } - } + /** + * Attempt to log the provided message to the provided logger. If no logger was provided or if + * the log level does not meat the logger's threshold, then nothing will be logged. + * @param logLevel - The log level of this log. + * @param message - The message of this log. + */ + log(logLevel, message) { + this._options.log(logLevel, message); } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+const SDK_VERSION = "12.23.0"; +const SERVICE_VERSION = "2024-05-04"; +const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB +const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB +const BLOCK_BLOB_MAX_BLOCKS = 50000; +const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB +const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB +const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; +const REQUEST_TIMEOUT = 100 * 1000; // In ms +/** + * The OAuth scope to use with Azure Storage. + */ +const StorageOAuthScopes = "https://storage.azure.com/.default"; +const URLConstants = { + Parameters: { + FORCE_BROWSER_NO_CACHE: "_", + SIGNATURE: "sig", + SNAPSHOT: "snapshot", + VERSIONID: "versionid", + TIMEOUT: "timeout", + }, }; -const Metrics = { - serializedName: "Metrics", - type: { - name: "Composite", - className: "Metrics", - modelProperties: { - version: { - serializedName: "Version", - xmlName: "Version", - type: { - name: "String" - } - }, - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - includeAPIs: { - serializedName: "IncludeAPIs", - xmlName: "IncludeAPIs", - type: { - name: "Boolean" - } - }, - retentionPolicy: { - serializedName: "RetentionPolicy", - xmlName: "RetentionPolicy", - type: { - name: "Composite", - className: "RetentionPolicy" - } - } - } - } +const HTTPURLConnection = { + HTTP_ACCEPTED: 202, + HTTP_CONFLICT: 409, + HTTP_NOT_FOUND: 404, + HTTP_PRECON_FAILED: 412, + HTTP_RANGE_NOT_SATISFIABLE: 416, }; -const CorsRule = { - serializedName: "CorsRule", - type: { - name: "Composite", - className: "CorsRule", - modelProperties: { - allowedOrigins: { - serializedName: "AllowedOrigins", - required: true, - xmlName: "AllowedOrigins", - type: { - name: "String" - } - }, - allowedMethods: { - serializedName: "AllowedMethods", - required: true, - xmlName: "AllowedMethods", - type: { - name: "String" - } - }, - allowedHeaders: { - serializedName: 
"AllowedHeaders", - required: true, - xmlName: "AllowedHeaders", - type: { - name: "String" - } - }, - exposedHeaders: { - serializedName: "ExposedHeaders", - required: true, - xmlName: "ExposedHeaders", - type: { - name: "String" - } - }, - maxAgeInSeconds: { - constraints: { - InclusiveMinimum: 0 - }, - serializedName: "MaxAgeInSeconds", - required: true, - xmlName: "MaxAgeInSeconds", - type: { - name: "Number" - } - } - } - } -}; -const StaticWebsite = { - serializedName: "StaticWebsite", - type: { - name: "Composite", - className: "StaticWebsite", - modelProperties: { - enabled: { - serializedName: "Enabled", - required: true, - xmlName: "Enabled", - type: { - name: "Boolean" - } - }, - indexDocument: { - serializedName: "IndexDocument", - xmlName: "IndexDocument", - type: { - name: "String" - } - }, - errorDocument404Path: { - serializedName: "ErrorDocument404Path", - xmlName: "ErrorDocument404Path", - type: { - name: "String" - } - }, - defaultIndexDocumentPath: { - serializedName: "DefaultIndexDocumentPath", - xmlName: "DefaultIndexDocumentPath", - type: { - name: "String" - } - } - } - } -}; -const StorageError = { - serializedName: "StorageError", - type: { - name: "Composite", - className: "StorageError", - modelProperties: { - message: { - serializedName: "Message", - xmlName: "Message", - type: { - name: "String" - } - }, - code: { - serializedName: "Code", - xmlName: "Code", - type: { - name: "String" - } - } - } - } -}; -const BlobServiceStatistics = { - serializedName: "BlobServiceStatistics", - xmlName: "StorageServiceStats", - type: { - name: "Composite", - className: "BlobServiceStatistics", - modelProperties: { - geoReplication: { - serializedName: "GeoReplication", - xmlName: "GeoReplication", - type: { - name: "Composite", - className: "GeoReplication" - } - } - } - } -}; -const GeoReplication = { - serializedName: "GeoReplication", - type: { - name: "Composite", - className: "GeoReplication", - modelProperties: { - status: { - serializedName: 
"Status", - required: true, - xmlName: "Status", - type: { - name: "Enum", - allowedValues: ["live", "bootstrap", "unavailable"] - } - }, - lastSyncOn: { - serializedName: "LastSyncTime", - required: true, - xmlName: "LastSyncTime", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ListContainersSegmentResponse = { - serializedName: "ListContainersSegmentResponse", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "ListContainersSegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", - type: { - name: "String" - } - }, - marker: { - serializedName: "Marker", - xmlName: "Marker", - type: { - name: "String" - } - }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", - type: { - name: "Number" - } - }, - containerItems: { - serializedName: "ContainerItems", - required: true, - xmlName: "Containers", - xmlIsWrapped: true, - xmlElementName: "Container", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ContainerItem" - } - } - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } -}; -const ContainerItem = { - serializedName: "ContainerItem", - xmlName: "Container", - type: { - name: "Composite", - className: "ContainerItem", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", - type: { - name: "String" - } - }, - deleted: { - serializedName: "Deleted", - xmlName: "Deleted", - type: { - name: "Boolean" - } - }, - version: { - serializedName: "Version", - xmlName: "Version", - type: { - name: "String" - } - }, - properties: { - serializedName: "Properties", - xmlName: "Properties", - type: { - name: "Composite", - className: "ContainerProperties" - } 
- }, - metadata: { - serializedName: "Metadata", - xmlName: "Metadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - } - } - } +const HeaderConstants = { + AUTHORIZATION: "Authorization", + AUTHORIZATION_SCHEME: "Bearer", + CONTENT_ENCODING: "Content-Encoding", + CONTENT_ID: "Content-ID", + CONTENT_LANGUAGE: "Content-Language", + CONTENT_LENGTH: "Content-Length", + CONTENT_MD5: "Content-Md5", + CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", + CONTENT_TYPE: "Content-Type", + COOKIE: "Cookie", + DATE: "date", + IF_MATCH: "if-match", + IF_MODIFIED_SINCE: "if-modified-since", + IF_NONE_MATCH: "if-none-match", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + PREFIX_FOR_STORAGE: "x-ms-", + RANGE: "Range", + USER_AGENT: "User-Agent", + X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", + X_MS_COPY_SOURCE: "x-ms-copy-source", + X_MS_DATE: "x-ms-date", + X_MS_ERROR_CODE: "x-ms-error-code", + X_MS_VERSION: "x-ms-version", }; -const ContainerProperties = { - serializedName: "ContainerProperties", - type: { - name: "Composite", - className: "ContainerProperties", - modelProperties: { - lastModified: { - serializedName: "Last-Modified", - required: true, - xmlName: "Last-Modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "Etag", - required: true, - xmlName: "Etag", - type: { - name: "String" - } - }, - leaseStatus: { - serializedName: "LeaseStatus", - xmlName: "LeaseStatus", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - leaseState: { - serializedName: "LeaseState", - xmlName: "LeaseState", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseDuration: { - serializedName: "LeaseDuration", - xmlName: "LeaseDuration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - publicAccess: { - serializedName: "PublicAccess", - xmlName: "PublicAccess", - type: { - name: "Enum", - 
allowedValues: ["container", "blob"] - } - }, - hasImmutabilityPolicy: { - serializedName: "HasImmutabilityPolicy", - xmlName: "HasImmutabilityPolicy", - type: { - name: "Boolean" - } - }, - hasLegalHold: { - serializedName: "HasLegalHold", - xmlName: "HasLegalHold", - type: { - name: "Boolean" - } - }, - defaultEncryptionScope: { - serializedName: "DefaultEncryptionScope", - xmlName: "DefaultEncryptionScope", - type: { - name: "String" - } - }, - preventEncryptionScopeOverride: { - serializedName: "DenyEncryptionScopeOverride", - xmlName: "DenyEncryptionScopeOverride", - type: { - name: "Boolean" - } - }, - deletedOn: { - serializedName: "DeletedTime", - xmlName: "DeletedTime", - type: { - name: "DateTimeRfc1123" - } - }, - remainingRetentionDays: { - serializedName: "RemainingRetentionDays", - xmlName: "RemainingRetentionDays", - type: { - name: "Number" - } - }, - isImmutableStorageWithVersioningEnabled: { - serializedName: "ImmutableStorageWithVersioningEnabled", - xmlName: "ImmutableStorageWithVersioningEnabled", - type: { - name: "Boolean" - } - } - } - } -}; -const KeyInfo = { - serializedName: "KeyInfo", - type: { - name: "Composite", - className: "KeyInfo", - modelProperties: { - startsOn: { - serializedName: "Start", - required: true, - xmlName: "Start", - type: { - name: "String" - } - }, - expiresOn: { - serializedName: "Expiry", - required: true, - xmlName: "Expiry", - type: { - name: "String" - } - } - } - } -}; -const UserDelegationKey = { - serializedName: "UserDelegationKey", - type: { - name: "Composite", - className: "UserDelegationKey", - modelProperties: { - signedObjectId: { - serializedName: "SignedOid", - required: true, - xmlName: "SignedOid", - type: { - name: "String" - } - }, - signedTenantId: { - serializedName: "SignedTid", - required: true, - xmlName: "SignedTid", - type: { - name: "String" - } - }, - signedStartsOn: { - serializedName: "SignedStart", - required: true, - xmlName: "SignedStart", - type: { - name: "String" - } - }, - 
signedExpiresOn: { - serializedName: "SignedExpiry", - required: true, - xmlName: "SignedExpiry", - type: { - name: "String" - } - }, - signedService: { - serializedName: "SignedService", - required: true, - xmlName: "SignedService", - type: { - name: "String" - } - }, - signedVersion: { - serializedName: "SignedVersion", - required: true, - xmlName: "SignedVersion", - type: { - name: "String" - } - }, - value: { - serializedName: "Value", - required: true, - xmlName: "Value", - type: { - name: "String" - } - } - } - } -}; -const FilterBlobSegment = { - serializedName: "FilterBlobSegment", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "FilterBlobSegment", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - where: { - serializedName: "Where", - required: true, - xmlName: "Where", - type: { - name: "String" - } - }, - blobs: { - serializedName: "Blobs", - required: true, - xmlName: "Blobs", - xmlIsWrapped: true, - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "FilterBlobItem" - } - } - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } -}; -const FilterBlobItem = { - serializedName: "FilterBlobItem", - xmlName: "Blob", - type: { - name: "Composite", - className: "FilterBlobItem", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", - type: { - name: "String" - } - }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - type: { - name: "String" - } - }, - tags: { - serializedName: "Tags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags" - } - } - } - } -}; -const BlobTags = { - serializedName: "BlobTags", - xmlName: "Tags", - type: { - name: 
"Composite", - className: "BlobTags", - modelProperties: { - blobTagSet: { - serializedName: "BlobTagSet", - required: true, - xmlName: "TagSet", - xmlIsWrapped: true, - xmlElementName: "Tag", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobTag" - } - } - } - } - } - } -}; -const BlobTag = { - serializedName: "BlobTag", - xmlName: "Tag", - type: { - name: "Composite", - className: "BlobTag", - modelProperties: { - key: { - serializedName: "Key", - required: true, - xmlName: "Key", - type: { - name: "String" - } - }, - value: { - serializedName: "Value", - required: true, - xmlName: "Value", - type: { - name: "String" - } - } - } - } -}; -const SignedIdentifier = { - serializedName: "SignedIdentifier", - xmlName: "SignedIdentifier", - type: { - name: "Composite", - className: "SignedIdentifier", - modelProperties: { - id: { - serializedName: "Id", - required: true, - xmlName: "Id", - type: { - name: "String" - } - }, - accessPolicy: { - serializedName: "AccessPolicy", - xmlName: "AccessPolicy", - type: { - name: "Composite", - className: "AccessPolicy" - } - } - } - } -}; -const AccessPolicy = { - serializedName: "AccessPolicy", - type: { - name: "Composite", - className: "AccessPolicy", - modelProperties: { - startsOn: { - serializedName: "Start", - xmlName: "Start", - type: { - name: "String" - } - }, - expiresOn: { - serializedName: "Expiry", - xmlName: "Expiry", - type: { - name: "String" - } - }, - permissions: { - serializedName: "Permission", - xmlName: "Permission", - type: { - name: "String" - } - } - } - } -}; -const ListBlobsFlatSegmentResponse = { - serializedName: "ListBlobsFlatSegmentResponse", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "ListBlobsFlatSegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - containerName: { - 
serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", - type: { - name: "String" - } - }, - marker: { - serializedName: "Marker", - xmlName: "Marker", - type: { - name: "String" - } - }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", - type: { - name: "Number" - } - }, - segment: { - serializedName: "Segment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobFlatListSegment" - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } - } - } -}; -const BlobFlatListSegment = { - serializedName: "BlobFlatListSegment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobFlatListSegment", - modelProperties: { - blobItems: { - serializedName: "BlobItems", - required: true, - xmlName: "BlobItems", - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobItemInternal" - } - } - } - } - } - } -}; -const BlobItemInternal = { - serializedName: "BlobItemInternal", - xmlName: "Blob", - type: { - name: "Composite", - className: "BlobItemInternal", - modelProperties: { - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "Composite", - className: "BlobName" - } - }, - deleted: { - serializedName: "Deleted", - required: true, - xmlName: "Deleted", - type: { - name: "Boolean" - } - }, - snapshot: { - serializedName: "Snapshot", - required: true, - xmlName: "Snapshot", - type: { - name: "String" - } - }, - versionId: { - serializedName: "VersionId", - xmlName: "VersionId", - type: { - name: "String" - } - }, - isCurrentVersion: { - serializedName: "IsCurrentVersion", - xmlName: "IsCurrentVersion", - type: { - name: "Boolean" - } - }, - properties: { - serializedName: "Properties", - xmlName: "Properties", - type: { - name: 
"Composite", - className: "BlobPropertiesInternal" - } - }, - metadata: { - serializedName: "Metadata", - xmlName: "Metadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - blobTags: { - serializedName: "BlobTags", - xmlName: "Tags", - type: { - name: "Composite", - className: "BlobTags" - } - }, - objectReplicationMetadata: { - serializedName: "ObjectReplicationMetadata", - xmlName: "OrMetadata", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - } - }, - hasVersionsOnly: { - serializedName: "HasVersionsOnly", - xmlName: "HasVersionsOnly", - type: { - name: "Boolean" - } - } - } - } -}; -const BlobName = { - serializedName: "BlobName", - type: { - name: "Composite", - className: "BlobName", - modelProperties: { - encoded: { - serializedName: "Encoded", - xmlName: "Encoded", - xmlIsAttribute: true, - type: { - name: "Boolean" - } - }, - content: { - serializedName: "content", - xmlName: "content", - xmlIsMsText: true, - type: { - name: "String" - } - } - } - } -}; -const BlobPropertiesInternal = { - serializedName: "BlobPropertiesInternal", - xmlName: "Properties", - type: { - name: "Composite", - className: "BlobPropertiesInternal", - modelProperties: { - createdOn: { - serializedName: "Creation-Time", - xmlName: "Creation-Time", - type: { - name: "DateTimeRfc1123" - } - }, - lastModified: { - serializedName: "Last-Modified", - required: true, - xmlName: "Last-Modified", - type: { - name: "DateTimeRfc1123" - } - }, - etag: { - serializedName: "Etag", - required: true, - xmlName: "Etag", - type: { - name: "String" - } - }, - contentLength: { - serializedName: "Content-Length", - xmlName: "Content-Length", - type: { - name: "Number" - } - }, - contentType: { - serializedName: "Content-Type", - xmlName: "Content-Type", - type: { - name: "String" - } - }, - contentEncoding: { - serializedName: "Content-Encoding", - xmlName: "Content-Encoding", - type: { - name: "String" - } - }, - contentLanguage: { - 
serializedName: "Content-Language", - xmlName: "Content-Language", - type: { - name: "String" - } - }, - contentMD5: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", - type: { - name: "ByteArray" - } - }, - contentDisposition: { - serializedName: "Content-Disposition", - xmlName: "Content-Disposition", - type: { - name: "String" - } - }, - cacheControl: { - serializedName: "Cache-Control", - xmlName: "Cache-Control", - type: { - name: "String" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, - blobType: { - serializedName: "BlobType", - xmlName: "BlobType", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } - }, - leaseStatus: { - serializedName: "LeaseStatus", - xmlName: "LeaseStatus", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - leaseState: { - serializedName: "LeaseState", - xmlName: "LeaseState", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseDuration: { - serializedName: "LeaseDuration", - xmlName: "LeaseDuration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - copyId: { - serializedName: "CopyId", - xmlName: "CopyId", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "CopyStatus", - xmlName: "CopyStatus", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } - }, - copySource: { - serializedName: "CopySource", - xmlName: "CopySource", - type: { - name: "String" - } - }, - copyProgress: { - serializedName: "CopyProgress", - xmlName: "CopyProgress", - type: { - name: "String" - } - }, - copyCompletedOn: { - serializedName: "CopyCompletionTime", - xmlName: "CopyCompletionTime", - type: { - name: "DateTimeRfc1123" - } - }, - copyStatusDescription: { - serializedName: "CopyStatusDescription", - xmlName: 
"CopyStatusDescription", - type: { - name: "String" - } - }, - serverEncrypted: { - serializedName: "ServerEncrypted", - xmlName: "ServerEncrypted", - type: { - name: "Boolean" - } - }, - incrementalCopy: { - serializedName: "IncrementalCopy", - xmlName: "IncrementalCopy", - type: { - name: "Boolean" - } - }, - destinationSnapshot: { - serializedName: "DestinationSnapshot", - xmlName: "DestinationSnapshot", - type: { - name: "String" - } - }, - deletedOn: { - serializedName: "DeletedTime", - xmlName: "DeletedTime", - type: { - name: "DateTimeRfc1123" - } - }, - remainingRetentionDays: { - serializedName: "RemainingRetentionDays", - xmlName: "RemainingRetentionDays", - type: { - name: "Number" - } - }, - accessTier: { - serializedName: "AccessTier", - xmlName: "AccessTier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } - }, - accessTierInferred: { - serializedName: "AccessTierInferred", - xmlName: "AccessTierInferred", - type: { - name: "Boolean" - } - }, - archiveStatus: { - serializedName: "ArchiveStatus", - xmlName: "ArchiveStatus", - type: { - name: "Enum", - allowedValues: [ - "rehydrate-pending-to-hot", - "rehydrate-pending-to-cool", - "rehydrate-pending-to-cold" - ] - } - }, - customerProvidedKeySha256: { - serializedName: "CustomerProvidedKeySha256", - xmlName: "CustomerProvidedKeySha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "EncryptionScope", - xmlName: "EncryptionScope", - type: { - name: "String" - } - }, - accessTierChangedOn: { - serializedName: "AccessTierChangeTime", - xmlName: "AccessTierChangeTime", - type: { - name: "DateTimeRfc1123" - } - }, - tagCount: { - serializedName: "TagCount", - xmlName: "TagCount", - type: { - name: "Number" - } - }, - expiresOn: { - serializedName: "Expiry-Time", - xmlName: "Expiry-Time", - type: { - name: "DateTimeRfc1123" - } - }, - 
isSealed: { - serializedName: "Sealed", - xmlName: "Sealed", - type: { - name: "Boolean" - } - }, - rehydratePriority: { - serializedName: "RehydratePriority", - xmlName: "RehydratePriority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] - } - }, - lastAccessedOn: { - serializedName: "LastAccessTime", - xmlName: "LastAccessTime", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyExpiresOn: { - serializedName: "ImmutabilityPolicyUntilDate", - xmlName: "ImmutabilityPolicyUntilDate", - type: { - name: "DateTimeRfc1123" - } - }, - immutabilityPolicyMode: { - serializedName: "ImmutabilityPolicyMode", - xmlName: "ImmutabilityPolicyMode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - }, - legalHold: { - serializedName: "LegalHold", - xmlName: "LegalHold", - type: { - name: "Boolean" - } +const ETagNone = ""; +const ETagAny = "*"; +const SIZE_1_MB = 1 * 1024 * 1024; +const BATCH_MAX_REQUEST = 256; +const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; +const HTTP_LINE_ENDING = "\r\n"; +const HTTP_VERSION_1_1 = "HTTP/1.1"; +const EncryptionAlgorithmAES25 = "AES256"; +const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; +const StorageBlobLoggingAllowedHeaderNames = [ + "Access-Control-Allow-Origin", + "Cache-Control", + "Content-Length", + "Content-Type", + "Date", + "Request-Id", + "traceparent", + "Transfer-Encoding", + "User-Agent", + "x-ms-client-request-id", + "x-ms-date", + "x-ms-error-code", + "x-ms-request-id", + "x-ms-return-client-request-id", + "x-ms-version", + "Accept-Ranges", + "Content-Disposition", + "Content-Encoding", + "Content-Language", + "Content-MD5", + "Content-Range", + "ETag", + "Last-Modified", + "Server", + "Vary", + "x-ms-content-crc64", + "x-ms-copy-action", + 
"x-ms-copy-completion-time", + "x-ms-copy-id", + "x-ms-copy-progress", + "x-ms-copy-status", + "x-ms-has-immutability-policy", + "x-ms-has-legal-hold", + "x-ms-lease-state", + "x-ms-lease-status", + "x-ms-range", + "x-ms-request-server-encrypted", + "x-ms-server-encrypted", + "x-ms-snapshot", + "x-ms-source-range", + "If-Match", + "If-Modified-Since", + "If-None-Match", + "If-Unmodified-Since", + "x-ms-access-tier", + "x-ms-access-tier-change-time", + "x-ms-access-tier-inferred", + "x-ms-account-kind", + "x-ms-archive-status", + "x-ms-blob-append-offset", + "x-ms-blob-cache-control", + "x-ms-blob-committed-block-count", + "x-ms-blob-condition-appendpos", + "x-ms-blob-condition-maxsize", + "x-ms-blob-content-disposition", + "x-ms-blob-content-encoding", + "x-ms-blob-content-language", + "x-ms-blob-content-length", + "x-ms-blob-content-md5", + "x-ms-blob-content-type", + "x-ms-blob-public-access", + "x-ms-blob-sequence-number", + "x-ms-blob-type", + "x-ms-copy-destination-snapshot", + "x-ms-creation-time", + "x-ms-default-encryption-scope", + "x-ms-delete-snapshots", + "x-ms-delete-type-permanent", + "x-ms-deny-encryption-scope-override", + "x-ms-encryption-algorithm", + "x-ms-if-sequence-number-eq", + "x-ms-if-sequence-number-le", + "x-ms-if-sequence-number-lt", + "x-ms-incremental-copy", + "x-ms-lease-action", + "x-ms-lease-break-period", + "x-ms-lease-duration", + "x-ms-lease-id", + "x-ms-lease-time", + "x-ms-page-write", + "x-ms-proposed-lease-id", + "x-ms-range-get-content-md5", + "x-ms-rehydrate-priority", + "x-ms-sequence-number-action", + "x-ms-sku-name", + "x-ms-source-content-md5", + "x-ms-source-if-match", + "x-ms-source-if-modified-since", + "x-ms-source-if-none-match", + "x-ms-source-if-unmodified-since", + "x-ms-tag-count", + "x-ms-encryption-key-sha256", + "x-ms-if-tags", + "x-ms-source-if-tags", +]; +const StorageBlobLoggingAllowedQueryParameters = [ + "comp", + "maxresults", + "rscc", + "rscd", + "rsce", + "rscl", + "rsct", + "se", + "si", + "sip", + 
"sp", + "spr", + "sr", + "srt", + "ss", + "st", + "sv", + "include", + "marker", + "prefix", + "copyid", + "restype", + "blockid", + "blocklisttype", + "delimiter", + "prevsnapshot", + "ske", + "skoid", + "sks", + "skt", + "sktid", + "skv", + "snapshot", +]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; +const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; +/// List of ports used for path style addressing. +/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. +const PathStylePorts = [ + "10000", + "10001", + "10002", + "10003", + "10004", + "10100", + "10101", + "10102", + "10103", + "10104", + "11000", + "11001", + "11002", + "11003", + "11004", + "11100", + "11101", + "11102", + "11103", + "11104", +]; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Reserved URL characters must be properly escaped for Storage services like Blob or File. + * + * ## URL encode and escape strategy for JS SDKs + * + * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. + * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL + * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. + * + * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. + * + * This is what legacy V2 SDK does, simple and works for most of the cases. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. 
+ * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. + * + * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is + * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. + * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. + * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. + * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: + * + * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. + * + * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. + * - When customer URL string is "http://account.blob.core.windows.net/con/b:", + * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. + * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", + * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. + * + * This strategy gives us flexibility to create with any special characters. 
But "%" will be treated as a special characters, if the URL string + * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. + * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. + * And following URL strings are invalid: + * - "http://account.blob.core.windows.net/con/b%" + * - "http://account.blob.core.windows.net/con/b%2" + * - "http://account.blob.core.windows.net/con/b%G" + * + * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. + * + * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` + * + * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. + * + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata + * + * @param url - + */ +function escapeURLPath(url) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path || "/"; + path = escape(path); + urlParsed.pathname = path; + return urlParsed.toString(); +} +function getProxyUriFromDevConnString(connectionString) { + // Development Connection String + // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key + let proxyUri = ""; + if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { + // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri + const matchCredentials = connectionString.split(";"); + for (const element of matchCredentials) { + if 
(element.trim().startsWith("DevelopmentStorageProxyUri=")) { + proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; } } - } -}; -const ListBlobsHierarchySegmentResponse = { - serializedName: "ListBlobsHierarchySegmentResponse", - xmlName: "EnumerationResults", - type: { - name: "Composite", - className: "ListBlobsHierarchySegmentResponse", - modelProperties: { - serviceEndpoint: { - serializedName: "ServiceEndpoint", - required: true, - xmlName: "ServiceEndpoint", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - containerName: { - serializedName: "ContainerName", - required: true, - xmlName: "ContainerName", - xmlIsAttribute: true, - type: { - name: "String" - } - }, - prefix: { - serializedName: "Prefix", - xmlName: "Prefix", - type: { - name: "String" - } - }, - marker: { - serializedName: "Marker", - xmlName: "Marker", - type: { - name: "String" - } - }, - maxPageSize: { - serializedName: "MaxResults", - xmlName: "MaxResults", - type: { - name: "Number" - } - }, - delimiter: { - serializedName: "Delimiter", - xmlName: "Delimiter", - type: { - name: "String" - } - }, - segment: { - serializedName: "Segment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobHierarchyListSegment" - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } + } + return proxyUri; +} +function getValueInConnString(connectionString, argument) { + const elements = connectionString.split(";"); + for (const element of elements) { + if (element.trim().startsWith(argument)) { + return element.trim().match(argument + "=(.*)")[1]; + } + } + return ""; +} +/** + * Extracts the parts of an Azure Storage account connection string. + * + * @param connectionString - Connection string. + * @returns String key value pairs of the storage account's url and credentials. 
+ */ +function extractConnectionStringParts(connectionString) { + let proxyUri = ""; + if (connectionString.startsWith("UseDevelopmentStorage=true")) { + // Development connection string + proxyUri = getProxyUriFromDevConnString(connectionString); + connectionString = DevelopmentConnectionString; + } + // Matching BlobEndpoint in the Account connection string + let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); + // Slicing off '/' at the end if exists + // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) + blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; + if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && + connectionString.search("AccountKey=") !== -1) { + // Account connection string + let defaultEndpointsProtocol = ""; + let accountName = ""; + let accountKey = Buffer.from("accountKey", "base64"); + let endpointSuffix = ""; + // Get account name and key + accountName = getValueInConnString(connectionString, "AccountName"); + accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); + if (!blobEndpoint) { + // BlobEndpoint is not present in the Account connection string + // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` + defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); + const protocol = defaultEndpointsProtocol.toLowerCase(); + if (protocol !== "https" && protocol !== "http") { + throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); + } + endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); + if (!endpointSuffix) { + throw new Error("Invalid EndpointSuffix in the provided Connection String"); } + blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + } + if (!accountName) { + throw new Error("Invalid AccountName in the provided Connection String"); + } + else if (accountKey.length === 0) { + throw new Error("Invalid AccountKey in the provided Connection String"); + } + return { + kind: "AccountConnString", + url: blobEndpoint, + accountName, + accountKey, + proxyUri, + }; + } + else { + // SAS connection string + let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); + let accountName = getValueInConnString(connectionString, "AccountName"); + // if accountName is empty, try to read it from BlobEndpoint + if (!accountName) { + accountName = getAccountNameFromUrl(blobEndpoint); + } + if (!blobEndpoint) { + throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); + } + else if (!accountSas) { + throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); + } + // client constructors assume accountSas does *not* start with ? 
+ if (accountSas.startsWith("?")) { + accountSas = accountSas.substring(1); } + return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } -}; -const BlobHierarchyListSegment = { - serializedName: "BlobHierarchyListSegment", - xmlName: "Blobs", - type: { - name: "Composite", - className: "BlobHierarchyListSegment", - modelProperties: { - blobPrefixes: { - serializedName: "BlobPrefixes", - xmlName: "BlobPrefixes", - xmlElementName: "BlobPrefix", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobPrefix" - } - } - } - }, - blobItems: { - serializedName: "BlobItems", - required: true, - xmlName: "BlobItems", - xmlElementName: "Blob", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "BlobItemInternal" - } - } - } +} +/** + * Internal escape method implemented Strategy Two mentioned in escapeURL() description. + * + * @param text - + */ +function escape(text) { + return encodeURIComponent(text) + .replace(/%2F/g, "/") // Don't escape for "/" + .replace(/'/g, "%27") // Escape for "'" + .replace(/\+/g, "%20") + .replace(/%25/g, "%"); // Revert encoded "%" +} +/** + * Append a string to URL path. Will remove duplicated "/" in front of the string + * when URL path ends with a "/". + * + * @param url - Source URL string + * @param name - String to be appended to URL + * @returns An updated URL string + */ +function appendToURLPath(url, name) { + const urlParsed = new URL(url); + let path = urlParsed.pathname; + path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; + urlParsed.pathname = path; + return urlParsed.toString(); +} +/** + * Set URL parameter name and value. If name exists in URL parameters, old value + * will be replaced by name key. If not provide value, the parameter will be deleted. 
+ * + * @param url - Source URL string + * @param name - Parameter name + * @param value - Parameter value + * @returns An updated URL string + */ +function setURLParameter(url, name, value) { + const urlParsed = new URL(url); + const encodedName = encodeURIComponent(name); + const encodedValue = value ? encodeURIComponent(value) : undefined; + // mutating searchParams will change the encoding, so we have to do this ourselves + const searchString = urlParsed.search === "" ? "?" : urlParsed.search; + const searchPieces = []; + for (const pair of searchString.slice(1).split("&")) { + if (pair) { + const [key] = pair.split("=", 2); + if (key !== encodedName) { + searchPieces.push(pair); } } } -}; -const BlobPrefix = { - serializedName: "BlobPrefix", - type: { - name: "Composite", - className: "BlobPrefix", - modelProperties: { - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "Composite", - className: "BlobName" - } + if (encodedValue) { + searchPieces.push(`${encodedName}=${encodedValue}`); + } + urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; + return urlParsed.toString(); +} +/** + * Get URL parameter by name. + * + * @param url - + * @param name - + */ +function getURLParameter(url, name) { + var _a; + const urlParsed = new URL(url); + return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; +} +/** + * Set URL host. + * + * @param url - Source URL string + * @param host - New host string + * @returns An updated URL string + */ +function setURLHost(url, host) { + const urlParsed = new URL(url); + urlParsed.hostname = host; + return urlParsed.toString(); +} +/** + * Get URL path from an URL string. + * + * @param url - Source URL string + */ +function getURLPath(url) { + try { + const urlParsed = new URL(url); + return urlParsed.pathname; + } + catch (e) { + return undefined; + } +} +/** + * Get URL scheme from an URL string. 
+ * + * @param url - Source URL string + */ +function getURLScheme(url) { + try { + const urlParsed = new URL(url); + return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; + } + catch (e) { + return undefined; + } +} +/** + * Get URL path and query from an URL string. + * + * @param url - Source URL string + */ +function getURLPathAndQuery(url) { + const urlParsed = new URL(url); + const pathString = urlParsed.pathname; + if (!pathString) { + throw new RangeError("Invalid url without valid path."); + } + let queryString = urlParsed.search || ""; + queryString = queryString.trim(); + if (queryString !== "") { + queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + } + return `${pathString}${queryString}`; +} +/** + * Get URL query key value pairs from an URL string. + * + * @param url - + */ +function getURLQueries(url) { + let queryString = new URL(url).search; + if (!queryString) { + return {}; + } + queryString = queryString.trim(); + queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; + let querySubStrings = queryString.split("&"); + querySubStrings = querySubStrings.filter((value) => { + const indexOfEqual = value.indexOf("="); + const lastIndexOfEqual = value.lastIndexOf("="); + return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); + }); + const queries = {}; + for (const querySubString of querySubStrings) { + const splitResults = querySubString.split("="); + const key = splitResults[0]; + const value = splitResults[1]; + queries[key] = value; + } + return queries; +} +/** + * Append a string to URL query. + * + * @param url - Source URL string. + * @param queryParts - String to be appended to the URL query. + * @returns An updated URL string. 
+ */ +function appendToURLQuery(url, queryParts) { + const urlParsed = new URL(url); + let query = urlParsed.search; + if (query) { + query += "&" + queryParts; + } + else { + query = queryParts; + } + urlParsed.search = query; + return urlParsed.toString(); +} +/** + * Rounds a date off to seconds. + * + * @param date - + * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; + * If false, YYYY-MM-DDThh:mm:ssZ will be returned. + * @returns Date string in ISO8061 format, with or without 7 milliseconds component + */ +function truncatedISO8061Date(date, withMilliseconds = true) { + // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" + const dateString = date.toISOString(); + return withMilliseconds + ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" + : dateString.substring(0, dateString.length - 5) + "Z"; +} +/** + * Base64 encode. + * + * @param content - + */ +function base64encode(content) { + return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); +} +/** + * Generate a 64 bytes base64 block ID string. + * + * @param blockIndex - + */ +function generateBlockID(blockIDPrefix, blockIndex) { + // To generate a 64 bytes base64 string, source string should be 48 + const maxSourceStringLength = 48; + // A blob can have a maximum of 100,000 uncommitted blocks at any given time + const maxBlockIndexLength = 6; + const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; + if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { + blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + } + const res = blockIDPrefix + + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); + return base64encode(res); +} +/** + * Delay specified time interval. 
+ * + * @param timeInMs - + * @param aborter - + * @param abortError - + */ +async function delay(timeInMs, aborter, abortError) { + return new Promise((resolve, reject) => { + /* eslint-disable-next-line prefer-const */ + let timeout; + const abortHandler = () => { + if (timeout !== undefined) { + clearTimeout(timeout); } - } - } -}; -const BlockLookupList = { - serializedName: "BlockLookupList", - xmlName: "BlockList", - type: { - name: "Composite", - className: "BlockLookupList", - modelProperties: { - committed: { - serializedName: "Committed", - xmlName: "Committed", - xmlElementName: "Committed", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - uncommitted: { - serializedName: "Uncommitted", - xmlName: "Uncommitted", - xmlElementName: "Uncommitted", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } - }, - latest: { - serializedName: "Latest", - xmlName: "Latest", - xmlElementName: "Latest", - type: { - name: "Sequence", - element: { - type: { - name: "String" - } - } - } + reject(abortError); + }; + const resolveHandler = () => { + if (aborter !== undefined) { + aborter.removeEventListener("abort", abortHandler); } + resolve(); + }; + timeout = setTimeout(resolveHandler, timeInMs); + if (aborter !== undefined) { + aborter.addEventListener("abort", abortHandler); } + }); +} +/** + * String.prototype.padStart() + * + * @param currentString - + * @param targetLength - + * @param padString - + */ +function padStart(currentString, targetLength, padString = " ") { + // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes + if (String.prototype.padStart) { + return currentString.padStart(targetLength, padString); } -}; -const BlockList = { - serializedName: "BlockList", - type: { - name: "Composite", - className: "BlockList", - modelProperties: { - committedBlocks: { - serializedName: "CommittedBlocks", - xmlName: "CommittedBlocks", - xmlIsWrapped: true, - xmlElementName: 
"Block", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "Block" - } - } - } - }, - uncommittedBlocks: { - serializedName: "UncommittedBlocks", - xmlName: "UncommittedBlocks", - xmlIsWrapped: true, - xmlElementName: "Block", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "Block" - } - } - } - } - } + padString = padString || " "; + if (currentString.length > targetLength) { + return currentString; } -}; -const Block = { - serializedName: "Block", - type: { - name: "Composite", - className: "Block", - modelProperties: { - name: { - serializedName: "Name", - required: true, - xmlName: "Name", - type: { - name: "String" - } - }, - size: { - serializedName: "Size", - required: true, - xmlName: "Size", - type: { - name: "Number" - } - } + else { + targetLength = targetLength - currentString.length; + if (targetLength > padString.length) { + padString += padString.repeat(targetLength / padString.length); } + return padString.slice(0, targetLength) + currentString; } -}; -const PageList = { - serializedName: "PageList", - type: { - name: "Composite", - className: "PageList", - modelProperties: { - pageRange: { - serializedName: "PageRange", - xmlName: "PageRange", - xmlElementName: "PageRange", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "PageRange" - } - } - } - }, - clearRange: { - serializedName: "ClearRange", - xmlName: "ClearRange", - xmlElementName: "ClearRange", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ClearRange" - } - } - } - }, - continuationToken: { - serializedName: "NextMarker", - xmlName: "NextMarker", - type: { - name: "String" - } - } +} +/** + * If two strings are equal when compared case insensitive. 
+ * + * @param str1 - + * @param str2 - + */ +function iEqual(str1, str2) { + return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); +} +/** + * Extracts account name from the url + * @param url - url to extract the account name from + * @returns with the account name + */ +function getAccountNameFromUrl(url) { + const parsedUrl = new URL(url); + let accountName; + try { + if (parsedUrl.hostname.split(".")[1] === "blob") { + // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; + accountName = parsedUrl.hostname.split(".")[0]; + } + else if (isIpEndpointStyle(parsedUrl)) { + // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ + // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ + // .getPath() -> /devstoreaccount1/ + accountName = parsedUrl.pathname.split("/")[1]; } - } -}; -const PageRange = { - serializedName: "PageRange", - xmlName: "PageRange", - type: { - name: "Composite", - className: "PageRange", - modelProperties: { - start: { - serializedName: "Start", - required: true, - xmlName: "Start", - type: { - name: "Number" - } - }, - end: { - serializedName: "End", - required: true, - xmlName: "End", - type: { - name: "Number" - } - } + else { + // Custom domain case: "https://customdomain.com/containername/blob". 
+ accountName = ""; } + return accountName; } -}; -const ClearRange = { - serializedName: "ClearRange", - xmlName: "ClearRange", - type: { - name: "Composite", - className: "ClearRange", - modelProperties: { - start: { - serializedName: "Start", - required: true, - xmlName: "Start", - type: { - name: "Number" - } - }, - end: { - serializedName: "End", - required: true, - xmlName: "End", - type: { - name: "Number" - } - } - } + catch (error) { + throw new Error("Unable to extract accountName with provided information."); } -}; -const QueryRequest = { - serializedName: "QueryRequest", - xmlName: "QueryRequest", - type: { - name: "Composite", - className: "QueryRequest", - modelProperties: { - queryType: { - serializedName: "QueryType", - required: true, - xmlName: "QueryType", - type: { - name: "String" - } - }, - expression: { - serializedName: "Expression", - required: true, - xmlName: "Expression", - type: { - name: "String" - } - }, - inputSerialization: { - serializedName: "InputSerialization", - xmlName: "InputSerialization", - type: { - name: "Composite", - className: "QuerySerialization" - } - }, - outputSerialization: { - serializedName: "OutputSerialization", - xmlName: "OutputSerialization", - type: { - name: "Composite", - className: "QuerySerialization" - } - } - } +} +function isIpEndpointStyle(parsedUrl) { + const host = parsedUrl.host; + // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. + // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. + // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. + // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. 
+ return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || + (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); +} +/** + * Convert Tags to encoded string. + * + * @param tags - + */ +function toBlobTagsString(tags) { + if (tags === undefined) { + return undefined; } -}; -const QuerySerialization = { - serializedName: "QuerySerialization", - type: { - name: "Composite", - className: "QuerySerialization", - modelProperties: { - format: { - serializedName: "Format", - xmlName: "Format", - type: { - name: "Composite", - className: "QueryFormat" - } - } + const tagPairs = []; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); } } -}; -const QueryFormat = { - serializedName: "QueryFormat", - type: { - name: "Composite", - className: "QueryFormat", - modelProperties: { - type: { - serializedName: "Type", - required: true, - xmlName: "Type", - type: { - name: "Enum", - allowedValues: ["delimited", "json", "arrow", "parquet"] - } - }, - delimitedTextConfiguration: { - serializedName: "DelimitedTextConfiguration", - xmlName: "DelimitedTextConfiguration", - type: { - name: "Composite", - className: "DelimitedTextConfiguration" - } - }, - jsonTextConfiguration: { - serializedName: "JsonTextConfiguration", - xmlName: "JsonTextConfiguration", - type: { - name: "Composite", - className: "JsonTextConfiguration" - } - }, - arrowConfiguration: { - serializedName: "ArrowConfiguration", - xmlName: "ArrowConfiguration", - type: { - name: "Composite", - className: "ArrowConfiguration" - } - }, - parquetTextConfiguration: { - serializedName: "ParquetTextConfiguration", - xmlName: "ParquetTextConfiguration", - type: { - name: "any" - } - } + return tagPairs.join("&"); +} +/** + * Convert Tags type to BlobTags. 
+ * + * @param tags - + */ +function toBlobTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = { + blobTagSet: [], + }; + for (const key in tags) { + if (Object.prototype.hasOwnProperty.call(tags, key)) { + const value = tags[key]; + res.blobTagSet.push({ + key, + value, + }); } } -}; -const DelimitedTextConfiguration = { - serializedName: "DelimitedTextConfiguration", - xmlName: "DelimitedTextConfiguration", - type: { - name: "Composite", - className: "DelimitedTextConfiguration", - modelProperties: { - columnSeparator: { - serializedName: "ColumnSeparator", - xmlName: "ColumnSeparator", - type: { - name: "String" - } - }, - fieldQuote: { - serializedName: "FieldQuote", - xmlName: "FieldQuote", - type: { - name: "String" - } - }, - recordSeparator: { - serializedName: "RecordSeparator", - xmlName: "RecordSeparator", - type: { - name: "String" - } - }, - escapeChar: { - serializedName: "EscapeChar", - xmlName: "EscapeChar", - type: { - name: "String" - } - }, - headersPresent: { - serializedName: "HeadersPresent", - xmlName: "HasHeaders", - type: { - name: "Boolean" - } - } - } + return res; +} +/** + * Covert BlobTags to Tags type. + * + * @param tags - + */ +function toTags(tags) { + if (tags === undefined) { + return undefined; + } + const res = {}; + for (const blobTag of tags.blobTagSet) { + res[blobTag.key] = blobTag.value; + } + return res; +} +/** + * Convert BlobQueryTextConfiguration to QuerySerialization type. 
+ * + * @param textConfiguration - + */ +function toQuerySerialization(textConfiguration) { + if (textConfiguration === undefined) { + return undefined; + } + switch (textConfiguration.kind) { + case "csv": + return { + format: { + type: "delimited", + delimitedTextConfiguration: { + columnSeparator: textConfiguration.columnSeparator || ",", + fieldQuote: textConfiguration.fieldQuote || "", + recordSeparator: textConfiguration.recordSeparator, + escapeChar: textConfiguration.escapeCharacter || "", + headersPresent: textConfiguration.hasHeaders || false, + }, + }, + }; + case "json": + return { + format: { + type: "json", + jsonTextConfiguration: { + recordSeparator: textConfiguration.recordSeparator, + }, + }, + }; + case "arrow": + return { + format: { + type: "arrow", + arrowConfiguration: { + schema: textConfiguration.schema, + }, + }, + }; + case "parquet": + return { + format: { + type: "parquet", + }, + }; + default: + throw Error("Invalid BlobQueryTextConfiguration."); + } +} +function parseObjectReplicationRecord(objectReplicationRecord) { + if (!objectReplicationRecord) { + return undefined; + } + if ("policy-id" in objectReplicationRecord) { + // If the dictionary contains a key with policy id, we are not required to do any parsing since + // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. 
+ return undefined; } -}; -const JsonTextConfiguration = { - serializedName: "JsonTextConfiguration", - xmlName: "JsonTextConfiguration", - type: { - name: "Composite", - className: "JsonTextConfiguration", - modelProperties: { - recordSeparator: { - serializedName: "RecordSeparator", - xmlName: "RecordSeparator", - type: { - name: "String" - } - } + const orProperties = []; + for (const key in objectReplicationRecord) { + const ids = key.split("_"); + const policyPrefix = "or-"; + if (ids[0].startsWith(policyPrefix)) { + ids[0] = ids[0].substring(policyPrefix.length); } - } -}; -const ArrowConfiguration = { - serializedName: "ArrowConfiguration", - xmlName: "ArrowConfiguration", - type: { - name: "Composite", - className: "ArrowConfiguration", - modelProperties: { - schema: { - serializedName: "Schema", - required: true, - xmlName: "Schema", - xmlIsWrapped: true, - xmlElementName: "Field", - type: { - name: "Sequence", - element: { - type: { - name: "Composite", - className: "ArrowField" - } - } - } - } + const rule = { + ruleId: ids[1], + replicationStatus: objectReplicationRecord[key], + }; + const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); + if (policyIndex > -1) { + orProperties[policyIndex].rules.push(rule); } - } -}; -const ArrowField = { - serializedName: "ArrowField", - xmlName: "Field", - type: { - name: "Composite", - className: "ArrowField", - modelProperties: { - type: { - serializedName: "Type", - required: true, - xmlName: "Type", - type: { - name: "String" - } - }, - name: { - serializedName: "Name", - xmlName: "Name", - type: { - name: "String" - } - }, - precision: { - serializedName: "Precision", - xmlName: "Precision", - type: { - name: "Number" - } - }, - scale: { - serializedName: "Scale", - xmlName: "Scale", - type: { - name: "Number" - } - } + else { + orProperties.push({ + policyId: ids[0], + rules: [rule], + }); } } -}; -const ServiceSetPropertiesHeaders = { - serializedName: 
"Service_setPropertiesHeaders", - type: { - name: "Composite", - className: "ServiceSetPropertiesHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + return orProperties; +} +function httpAuthorizationToString(httpAuthorization) { + return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; +} +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); } -}; -const ServiceSetPropertiesExceptionHeaders = { - serializedName: "Service_setPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "ServiceSetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + else { + return name.content; } -}; -const ServiceGetPropertiesHeaders = { - serializedName: "Service_getPropertiesHeaders", - type: { - name: "Composite", - className: "ServiceGetPropertiesHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } +} +function 
ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function* ExtractPageRangeInfoItems(getPageRangesSegment) { + let pageRange = []; + let clearRange = []; + if (getPageRangesSegment.pageRange) + pageRange = getPageRangesSegment.pageRange; + if (getPageRangesSegment.clearRange) + clearRange = getPageRangesSegment.clearRange; + let pageRangeIndex = 0; + let clearRangeIndex = 0; + while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { + if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; + ++pageRangeIndex; } - } -}; -const ServiceGetPropertiesExceptionHeaders = { - serializedName: "Service_getPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } 
+ else { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; + ++clearRangeIndex; } } -}; -const ServiceGetStatisticsHeaders = { - serializedName: "Service_getStatisticsHeaders", - type: { - name: "Composite", - className: "ServiceGetStatisticsHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { + yield { + start: pageRange[pageRangeIndex].start, + end: pageRange[pageRangeIndex].end, + isClear: false, + }; } -}; -const ServiceGetStatisticsExceptionHeaders = { - serializedName: "Service_getStatisticsExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetStatisticsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { + yield { + start: clearRange[clearRangeIndex].start, + end: clearRange[clearRangeIndex].end, + isClear: true, + }; } -}; -const ServiceListContainersSegmentHeaders = { - serializedName: "Service_listContainersSegmentHeaders", - type: { - name: "Composite", - className: "ServiceListContainersSegmentHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - 
serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } +} +/** + * Escape the blobName but keep path separator ('/'). + */ +function EscapePath(blobName) { + const split = blobName.split("/"); + for (let i = 0; i < split.length; i++) { + split[i] = encodeURIComponent(split[i]); } -}; -const ServiceListContainersSegmentExceptionHeaders = { - serializedName: "Service_listContainersSegmentExceptionHeaders", - type: { - name: "Composite", - className: "ServiceListContainersSegmentExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + return split.join("/"); +} +/** + * A typesafe helper for ensuring that a given response object has + * the original _response attached. + * @param response - A response object from calling a client operation + * @returns The same object, but with known _response property + */ +function assertResponse(response) { + if (`_response` in response) { + return response; } + throw new TypeError(`Unexpected response object ${response}`); +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. 
+ */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS$1 = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; -const ServiceGetUserDelegationKeyHeaders = { - serializedName: "Service_getUserDelegationKeyHeaders", - type: { - name: "Composite", - className: "ServiceGetUserDelegationKeyHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } +const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +class StorageRetryPolicy extends BaseRequestPolicy { + /** + * Creates an instance of RetryPolicy. + * + * @param nextPolicy - + * @param options - + * @param retryOptions - + */ + constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { + super(nextPolicy, options); + // Initialize retry options + this.retryOptions = { + retryPolicyType: retryOptions.retryPolicyType + ? retryOptions.retryPolicyType + : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, + maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 + ? 
Math.floor(retryOptions.maxTries) + : DEFAULT_RETRY_OPTIONS$1.maxTries, + tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 + ? retryOptions.tryTimeoutInMs + : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, + retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 + ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) + : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, + maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 + ? retryOptions.maxRetryDelayInMs + : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, + secondaryHost: retryOptions.secondaryHost + ? retryOptions.secondaryHost + : DEFAULT_RETRY_OPTIONS$1.secondaryHost, + }; + } + /** + * Sends request. + * + * @param request - + */ + async sendRequest(request) { + return this.attemptSendRequest(request, false, 1); + } + /** + * Decide and perform next retry. Won't mutate request parameter. + * + * @param request - + * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then + * the resource was not found. This may be due to replication delay. So, in this + * case, we'll never try the secondary again for this operation. + * @param attempt - How many retries has been attempted to performed, starting from 1, which includes + * the attempt will be performed by this method call. 
+ */ + async attemptSendRequest(request, secondaryHas404, attempt) { + const newRequest = request.clone(); + const isPrimaryRetry = secondaryHas404 || + !this.retryOptions.secondaryHost || + !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || + attempt % 2 === 1; + if (!isPrimaryRetry) { + newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); + } + // Set the server-side timeout query parameter "timeout=[seconds]" + if (this.retryOptions.tryTimeoutInMs) { + newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); + } + let response; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); + response = await this._nextPolicy.sendRequest(newRequest); + if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { + return response; } + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); } - } -}; -const ServiceGetUserDelegationKeyExceptionHeaders = { - serializedName: "Service_getUserDelegationKeyExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetUserDelegationKeyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + catch (err) { + logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); + if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { + throw err; } } + await this.delay(isPrimaryRetry, attempt, request.abortSignal); + return this.attemptSendRequest(request, secondaryHas404, ++attempt); } -}; -const ServiceGetAccountInfoHeaders = { - serializedName: "Service_getAccountInfoHeaders", - type: { - name: "Composite", - className: "ServiceGetAccountInfoHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: 
"x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", - type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } - }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", - type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] - } - }, - isHierarchicalNamespaceEnabled: { - serializedName: "x-ms-is-hns-enabled", - xmlName: "x-ms-is-hns-enabled", - type: { - name: "Boolean" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + /** + * Decide whether to retry according to last HTTP response and retry counters. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param response - + * @param err - + */ + shouldRetry(isPrimaryRetry, attempt, response, err) { + if (attempt >= this.retryOptions.maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions + .maxTries}, no further try.`); + return false; + } + // Handle network failures, you may need to customize the list when you implement + // your own http client + const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js + ]; + if (err) { + for (const retriableError of retriableErrors) { + if (err.name.toUpperCase().includes(retriableError) || + err.message.toUpperCase().includes(retriableError) || + (err.code && err.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } } } - } -}; -const ServiceGetAccountInfoExceptionHeaders = { - serializedName: "Service_getAccountInfoExceptionHeaders", - type: { - name: "Composite", - className: "ServiceGetAccountInfoExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || err) { + const statusCode = response ? response.status : err ? 
err.statusCode : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; } } + if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + return false; } -}; -const ServiceSubmitBatchHeaders = { - serializedName: "Service_submitBatchHeaders", - type: { - name: "Composite", - className: "ServiceSubmitBatchHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + /** + * Delay a calculated time between retries. 
+ * + * @param isPrimaryRetry - + * @param attempt - + * @param abortSignal - + */ + async delay(isPrimaryRetry, attempt, abortSignal) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (this.retryOptions.retryPolicyType) { + case exports.StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); + break; + case exports.StorageRetryPolicyType.FIXED: + delayTimeInMs = this.retryOptions.retryDelayInMs; + break; } } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. + */ +class StorageRetryPolicyFactory { + /** + * Creates an instance of StorageRetryPolicyFactory. + * @param retryOptions - + */ + constructor(retryOptions) { + this.retryOptions = retryOptions; + } + /** + * Creates a StorageRetryPolicy object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential policy used to sign HTTP(S) requests before sending. This is an + * abstract class. + */ +class CredentialPolicy extends BaseRequestPolicy { + /** + * Sends out request. 
+ * + * @param request - + */ + sendRequest(request) { + return this._nextPolicy.sendRequest(this.signRequest(request)); } -}; -const ServiceSubmitBatchExceptionHeaders = { - serializedName: "Service_submitBatchExceptionHeaders", - type: { - name: "Composite", - className: "ServiceSubmitBatchExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } + /** + * Child classes must implement this method with request signing. This method + * will be executed in {@link sendRequest}. + * + * @param request - + */ + signRequest(request) { + // Child classes must override this method with request signing. This method + // will be executed in sendRequest(). + return request; } -}; -const ServiceFilterBlobsHeaders = { - serializedName: "Service_filterBlobsHeaders", - type: { - name: "Composite", - className: "ServiceFilterBlobsHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. + */ +class StorageSharedKeyCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of StorageSharedKeyCredentialPolicy. 
+ * @param nextPolicy - + * @param options - + * @param factory - + */ + constructor(nextPolicy, options, factory) { + super(nextPolicy, options); + this.factory = factory; + } + /** + * Signs request. + * + * @param request - + */ + signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } + const stringToSign = [ + request.method.toUpperCase(), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + this.getHeaderValueToSign(request, HeaderConstants.DATE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + this.getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + this.getCanonicalizedHeadersString(request) + + this.getCanonicalizedResourceString(request); + const signature = this.factory.computeHMACSHA256(stringToSign); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + return request; } -}; -const ServiceFilterBlobsExceptionHeaders = { - serializedName: 
"Service_filterBlobsExceptionHeaders", - type: { - name: "Composite", - className: "ServiceFilterBlobsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + /** + * Retrieve header value according to shared key sign rules. + * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + * + * @param request - + * @param headerName - + */ + getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; } + return value; } -}; -const ContainerCreateHeaders = { - serializedName: "Container_createHeaders", - type: { - name: "Composite", - className: "ContainerCreateHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } + /** + * To construct the CanonicalizedHeaders portion of the signature 
string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. + * + * @param request - + */ + getCanonicalizedHeadersString(request) { + let headersArray = request.headers.headersArray().filter((value) => { + return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); + }); + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; } - } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; } -}; -const ContainerCreateExceptionHeaders = { - serializedName: "Container_createExceptionHeaders", - type: { - name: "Composite", - className: "ContainerCreateExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + /** + * Retrieves the webResource canonicalized resource string. 
+ * + * @param request - + */ + getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${this.factory.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } } + return canonicalizedResourceString; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Credential is an abstract class for Azure Storage HTTP requests signing. This + * class will host an credentialPolicyCreator factory which generates CredentialPolicy. + */ +class Credential { + /** + * Creates a RequestPolicy object. + * + * @param _nextPolicy - + * @param _options - + */ + create(_nextPolicy, _options) { + throw new Error("Method should be implemented in children classes."); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * + * StorageSharedKeyCredential for account key authorization of Azure Storage service. + */ +class StorageSharedKeyCredential extends Credential { + /** + * Creates an instance of StorageSharedKeyCredential. + * @param accountName - + * @param accountKey - + */ + constructor(accountName, accountKey) { + super(); + this.accountName = accountName; + this.accountKey = Buffer.from(accountKey, "base64"); + } + /** + * Creates a StorageSharedKeyCredentialPolicy object. 
+ * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + } + /** + * Generates a hash signature for an HTTP request or for a SAS. + * + * @param stringToSign - + */ + computeHMACSHA256(stringToSign) { + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources + * or for use with Shared Access Signatures (SAS). + */ +class AnonymousCredentialPolicy extends CredentialPolicy { + /** + * Creates an instance of AnonymousCredentialPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * AnonymousCredential provides a credentialPolicyCreator member used to create + * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with + * HTTP(S) requests that read public resources or for use with Shared Access + * Signatures (SAS). + */ +class AnonymousCredential extends Credential { + /** + * Creates an {@link AnonymousCredentialPolicy} object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new AnonymousCredentialPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +let _defaultHttpClient; +function getCachedDefaultHttpClient() { + if (!_defaultHttpClient) { + _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); } + return _defaultHttpClient; +} + +// Copyright (c) Microsoft Corporation. 
+// Licensed under the MIT license. +/** + * The programmatic identifier of the StorageBrowserPolicy. + */ +const storageBrowserPolicyName = "storageBrowserPolicy"; +/** + * storageBrowserPolicy is a policy used to prevent browsers from caching requests + * and to remove cookies and explicit content-length headers. + */ +function storageBrowserPolicy() { + return { + name: storageBrowserPolicyName, + async sendRequest(request, next) { + if (coreUtil.isNode) { + return next(request); + } + if (request.method === "GET" || request.method === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.delete(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.delete(HeaderConstants.CONTENT_LENGTH); + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Name of the {@link storageRetryPolicy} + */ +const storageRetryPolicyName = "storageRetryPolicy"; +/** + * RetryPolicy types. + */ +var StorageRetryPolicyType; +(function (StorageRetryPolicyType) { + /** + * Exponential retry. Retry time delay grows exponentially. + */ + StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + /** + * Linear retry. Retry time delay grows linearly. 
+ */ + StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; +})(StorageRetryPolicyType || (StorageRetryPolicyType = {})); +// Default values of StorageRetryOptions +const DEFAULT_RETRY_OPTIONS = { + maxRetryDelayInMs: 120 * 1000, + maxTries: 4, + retryDelayInMs: 4 * 1000, + retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, + secondaryHost: "", + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; -const ContainerGetPropertiesHeaders = { - serializedName: "Container_getPropertiesHeaders", - type: { - name: "Composite", - className: "ContainerGetPropertiesHeaders", - modelProperties: { - metadata: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } - }, - headerCollectionPrefix: "x-ms-meta-" - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } - }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", - type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } - }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", - type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", 
- type: { - name: "DateTimeRfc1123" - } - }, - blobPublicAccess: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - }, - hasImmutabilityPolicy: { - serializedName: "x-ms-has-immutability-policy", - xmlName: "x-ms-has-immutability-policy", - type: { - name: "Boolean" - } - }, - hasLegalHold: { - serializedName: "x-ms-has-legal-hold", - xmlName: "x-ms-has-legal-hold", - type: { - name: "Boolean" - } - }, - defaultEncryptionScope: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", - type: { - name: "String" +const retriableErrors = [ + "ETIMEDOUT", + "ESOCKETTIMEDOUT", + "ECONNREFUSED", + "ECONNRESET", + "ENOENT", + "ENOTFOUND", + "TIMEOUT", + "EPIPE", + "REQUEST_SEND_ERROR", +]; +const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); +/** + * Retry policy with exponential retry and linear retry implemented. + */ +function storageRetryPolicy(options = {}) { + var _a, _b, _c, _d, _e, _f; + const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; + const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; + const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? _c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; + const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; + const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; + const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? 
_f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; + function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { + var _a, _b; + if (attempt >= maxTries) { + logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); + return false; + } + if (error) { + for (const retriableError of retriableErrors) { + if (error.name.toUpperCase().includes(retriableError) || + error.message.toUpperCase().includes(retriableError) || + (error.code && error.code.toString().toUpperCase() === retriableError)) { + logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); + return true; } - }, - denyEncryptionScopeOverride: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", - type: { - name: "Boolean" + } + if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && + (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { + logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); + return true; + } + } + // If attempt was against the secondary & it returned a StatusNotFound (404), then + // the resource was not found. This may be due to replication delay. So, in this + // case, we'll never try the secondary again for this operation. + if (response || error) { + const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? 
_b : 0; + if (!isPrimaryRetry && statusCode === 404) { + logger.info(`RetryPolicy: Secondary access with 404, will retry.`); + return true; + } + // Server internal error or server timeout + if (statusCode === 503 || statusCode === 500) { + logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); + return true; + } + } + return false; + } + function calculateDelay(isPrimaryRetry, attempt) { + let delayTimeInMs = 0; + if (isPrimaryRetry) { + switch (retryPolicyType) { + case StorageRetryPolicyType.EXPONENTIAL: + delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); + break; + case StorageRetryPolicyType.FIXED: + delayTimeInMs = retryDelayInMs; + break; + } + } + else { + delayTimeInMs = Math.random() * 1000; + } + logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); + return delayTimeInMs; + } + return { + name: storageRetryPolicyName, + async sendRequest(request, next) { + // Set the server-side timeout query parameter "timeout=[seconds]" + if (tryTimeoutInMs) { + request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); + } + const primaryUrl = request.url; + const secondaryUrl = secondaryHost ? setURLHost(request.url, secondaryHost) : undefined; + let secondaryHas404 = false; + let attempt = 1; + let retryAgain = true; + let response; + let error; + while (retryAgain) { + const isPrimaryRetry = secondaryHas404 || + !secondaryUrl || + !["GET", "HEAD", "OPTIONS"].includes(request.method) || + attempt % 2 === 1; + request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; + response = undefined; + error = undefined; + try { + logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); + response = await next(request); + secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); + } + catch (e) { + if (coreRestPipeline.isRestError(e)) { + logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); + error = e; + } + else { + logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); + throw e; + } } - }, - isImmutableStorageWithVersioningEnabled: { - serializedName: "x-ms-immutable-storage-with-versioning-enabled", - xmlName: "x-ms-immutable-storage-with-versioning-enabled", - type: { - name: "Boolean" + retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); + if (retryAgain) { + await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" + attempt++; + } + if (response) { + return response; + } + throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The programmatic identifier of the storageSharedKeyCredentialPolicy. + */ +const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; +/** + * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. 
+ */ +function storageSharedKeyCredentialPolicy(options) { + function signRequest(request) { + request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); + if (request.body && + (typeof request.body === "string" || Buffer.isBuffer(request.body)) && + request.body.length > 0) { + request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); + } + const stringToSign = [ + request.method.toUpperCase(), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), + getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), + getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), + getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), + getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), + getHeaderValueToSign(request, HeaderConstants.DATE), + getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.IF_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), + getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), + getHeaderValueToSign(request, HeaderConstants.RANGE), + ].join("\n") + + "\n" + + getCanonicalizedHeadersString(request) + + getCanonicalizedResourceString(request); + const signature = crypto.createHmac("sha256", options.accountKey) + .update(stringToSign, "utf8") + .digest("base64"); + request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); + // console.log(`[URL]:${request.url}`); + // console.log(`[HEADERS]:${request.headers.toString()}`); + // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); + // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); + } + /** + * Retrieve header value according to shared key sign rules. 
+ * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + */ + function getHeaderValueToSign(request, headerName) { + const value = request.headers.get(headerName); + if (!value) { + return ""; + } + // When using version 2015-02-21 or later, if Content-Length is zero, then + // set the Content-Length part of the StringToSign to an empty string. + // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key + if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { + return ""; + } + return value; + } + /** + * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: + * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. + * 2. Convert each HTTP header name to lowercase. + * 3. Sort the headers lexicographically by header name, in ascending order. + * Each header may appear only once in the string. + * 4. Replace any linear whitespace in the header value with a single space. + * 5. Trim any whitespace around the colon in the header. + * 6. Finally, append a new-line character to each canonicalized header in the resulting list. + * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
+ * + */ + function getCanonicalizedHeadersString(request) { + let headersArray = []; + for (const [name, value] of request.headers) { + if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { + headersArray.push({ name, value }); + } + } + headersArray.sort((a, b) => { + return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); + }); + // Remove duplicate headers + headersArray = headersArray.filter((value, index, array) => { + if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { + return false; + } + return true; + }); + let canonicalizedHeadersStringToSign = ""; + headersArray.forEach((header) => { + canonicalizedHeadersStringToSign += `${header.name + .toLowerCase() + .trimRight()}:${header.value.trimLeft()}\n`; + }); + return canonicalizedHeadersStringToSign; + } + function getCanonicalizedResourceString(request) { + const path = getURLPath(request.url) || "/"; + let canonicalizedResourceString = ""; + canonicalizedResourceString += `/${options.accountName}${path}`; + const queries = getURLQueries(request.url); + const lowercaseQueries = {}; + if (queries) { + const queryKeys = []; + for (const key in queries) { + if (Object.prototype.hasOwnProperty.call(queries, key)) { + const lowercaseKey = key.toLowerCase(); + lowercaseQueries[lowercaseKey] = queries[key]; + queryKeys.push(lowercaseKey); } } + queryKeys.sort(); + for (const key of queryKeys) { + canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; + } + } + return canonicalizedResourceString; + } + return { + name: storageSharedKeyCredentialPolicyName, + async sendRequest(request, next) { + signRequest(request); + return next(request); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: + * + * 1. 
Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. + * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL + * thus avoid the browser cache. + * + * 2. Remove cookie header for security + * + * 3. Remove content-length header to avoid browsers warning + */ +class StorageBrowserPolicy extends BaseRequestPolicy { + /** + * Creates an instance of StorageBrowserPolicy. + * @param nextPolicy - + * @param options - + */ + // The base class has a protected constructor. Adding a public one to enable constructing of this class. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + */ + async sendRequest(request) { + if (coreUtil.isNode) { + return this._nextPolicy.sendRequest(request); } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. + */ +class StorageBrowserPolicyFactory { + /** + * Creates a StorageBrowserPolicyFactory object. + * + * @param nextPolicy - + * @param options - + */ + create(nextPolicy, options) { + return new StorageBrowserPolicy(nextPolicy, options); + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+/** + * A helper to decide if a given argument satisfies the Pipeline contract + * @param pipeline - An argument that may be a Pipeline + * @returns true when the argument satisfies the Pipeline contract + */ +function isPipelineLike(pipeline) { + if (!pipeline || typeof pipeline !== "object") { + return false; + } + const castPipeline = pipeline; + return (Array.isArray(castPipeline.factories) && + typeof castPipeline.options === "object" && + typeof castPipeline.toServiceClientOptions === "function"); +} +/** + * A Pipeline class containing HTTP request policies. + * You can create a default Pipeline by calling {@link newPipeline}. + * Or you can create a Pipeline with your own policies by the constructor of Pipeline. + * + * Refer to {@link newPipeline} and provided policies before implementing your + * customized Pipeline. + */ +class Pipeline { + /** + * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. + * + * @param factories - + * @param options - + */ + constructor(factories, options = {}) { + this.factories = factories; + this.options = options; + } + /** + * Transfer Pipeline object to ServiceClientOptions object which is required by + * ServiceClient constructor. + * + * @returns The ServiceClientOptions object from this Pipeline. + */ + toServiceClientOptions() { + return { + httpClient: this.options.httpClient, + requestPolicyFactories: this.factories, + }; } -}; -const ContainerGetPropertiesExceptionHeaders = { - serializedName: "Container_getPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "ContainerGetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } +} +/** + * Creates a new Pipeline object with Credential provided. 
+ * + * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. + * @param pipelineOptions - Optional. Options. + * @returns A new Pipeline object. + */ +function newPipeline(credential, pipelineOptions = {}) { + if (!credential) { + credential = new AnonymousCredential(); } -}; -const ContainerDeleteHeaders = { - serializedName: "Container_deleteHeaders", - type: { - name: "Composite", - className: "ContainerDeleteHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + const pipeline = new Pipeline([], pipelineOptions); + pipeline._credential = credential; + return pipeline; +} +function processDownlevelPipeline(pipeline) { + const knownFactoryFunctions = [ + isAnonymousCredential, + isStorageSharedKeyCredential, + isCoreHttpBearerTokenFactory, + isStorageBrowserPolicyFactory, + isStorageRetryPolicyFactory, + isStorageTelemetryPolicyFactory, + isCoreHttpPolicyFactory, + ]; + if (pipeline.factories.length) { + const novelFactories = pipeline.factories.filter((factory) => { + return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); + }); + if (novelFactories.length) { + const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); + // 
if there are any left over, wrap in a requestPolicyFactoryPolicy + return { + wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), + afterRetry: hasInjector, + }; } } -}; -const ContainerDeleteExceptionHeaders = { - serializedName: "Container_deleteExceptionHeaders", - type: { - name: "Composite", - className: "ContainerDeleteExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } + return undefined; +} +function getCoreClientOptions(pipeline) { + var _a; + const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); + let httpClient = pipeline._coreHttpClient; + if (!httpClient) { + httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); + pipeline._coreHttpClient = httpClient; + } + let corePipeline = pipeline._corePipeline; + if (!corePipeline) { + const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; + const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix + ? 
`${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { + additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, + additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + logger: logger.info, + }, userAgentOptions: { + userAgentPrefix, + }, serializationOptions: { + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + }, deserializationOptions: { + parseXML: coreXml.parseXML, + serializerOptions: { + xml: { + // Use customized XML char key of "#" so we can deserialize metadata + // with "_" key + xmlCharKey: "#", + }, + }, + } })); + corePipeline.removePolicy({ phase: "Retry" }); + corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); + corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); + corePipeline.addPolicy(storageBrowserPolicy()); + const downlevelResults = processDownlevelPipeline(pipeline); + if (downlevelResults) { + corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? { afterPhase: "Retry" } : undefined); + } + const credential = getCredentialFromPipeline(pipeline); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + pipeline._corePipeline = corePipeline; + } + return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); +} +function getCredentialFromPipeline(pipeline) { + // see if we squirreled one away on the type itself + if (pipeline._credential) { + return pipeline._credential; + } + // if it came from another package, loop over the factories and look for one like before + let credential = new AnonymousCredential(); + for (const factory of pipeline.factories) { + if (coreAuth.isTokenCredential(factory.credential)) { + // Only works if the factory has been attached a "credential" property. + // We do that in newPipeline() when using TokenCredential. 
+ credential = factory.credential; + } + else if (isStorageSharedKeyCredential(factory)) { + return factory; } } -}; -const ContainerSetMetadataHeaders = { - serializedName: "Container_setMetadataHeaders", + return credential; +} +function isStorageSharedKeyCredential(factory) { + if (factory instanceof StorageSharedKeyCredential) { + return true; + } + return factory.constructor.name === "StorageSharedKeyCredential"; +} +function isAnonymousCredential(factory) { + if (factory instanceof AnonymousCredential) { + return true; + } + return factory.constructor.name === "AnonymousCredential"; +} +function isCoreHttpBearerTokenFactory(factory) { + return coreAuth.isTokenCredential(factory.credential); +} +function isStorageBrowserPolicyFactory(factory) { + if (factory instanceof StorageBrowserPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageBrowserPolicyFactory"; +} +function isStorageRetryPolicyFactory(factory) { + if (factory instanceof StorageRetryPolicyFactory) { + return true; + } + return factory.constructor.name === "StorageRetryPolicyFactory"; +} +function isStorageTelemetryPolicyFactory(factory) { + return factory.constructor.name === "TelemetryPolicyFactory"; +} +function isInjectorPolicyFactory(factory) { + return factory.constructor.name === "InjectorPolicyFactory"; +} +function isCoreHttpPolicyFactory(factory) { + const knownPolicies = [ + "GenerateClientRequestIdPolicy", + "TracingPolicy", + "LogPolicy", + "ProxyPolicy", + "DisableResponseDecompressionPolicy", + "KeepAlivePolicy", + "DeserializationPolicy", + ]; + const mockHttpClient = { + sendRequest: async (request) => { + return { + request, + headers: request.headers.clone(), + status: 500, + }; + }, + }; + const mockRequestPolicyOptions = { + log(_logLevel, _message) { + /* do nothing */ + }, + shouldLog(_logLevel) { + return false; + }, + }; + const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); + const policyName = 
policyInstance.constructor.name; + // bundlers sometimes add a custom suffix to the class name to make it unique + return knownPolicies.some((knownPolicyName) => { + return policyName.startsWith(knownPolicyName); + }); +} + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +const BlobServiceProperties = { + serializedName: "BlobServiceProperties", + xmlName: "StorageServiceProperties", type: { name: "Composite", - className: "ContainerSetMetadataHeaders", + className: "BlobServiceProperties", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + blobAnalyticsLogging: { + serializedName: "Logging", + xmlName: "Logging", type: { - name: "String" - } + name: "Composite", + className: "Logging", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + hourMetrics: { + serializedName: "HourMetrics", + xmlName: "HourMetrics", type: { - name: "DateTimeRfc1123" - } + name: "Composite", + className: "Metrics", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + minuteMetrics: { + serializedName: "MinuteMetrics", + xmlName: "MinuteMetrics", type: { - name: "String" - } + name: "Composite", + className: "Metrics", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + cors: { + serializedName: "Cors", + xmlName: "Cors", + xmlIsWrapped: true, + xmlElementName: "CorsRule", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "CorsRule", + }, + }, + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + defaultServiceVersion: { + serializedName: "DefaultServiceVersion", + xmlName: "DefaultServiceVersion", type: { - name: "String" - } + name: "String", + }, }, - date: 
{ - serializedName: "date", - xmlName: "date", + deleteRetentionPolicy: { + serializedName: "DeleteRetentionPolicy", + xmlName: "DeleteRetentionPolicy", type: { - name: "DateTimeRfc1123" - } + name: "Composite", + className: "RetentionPolicy", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerSetMetadataExceptionHeaders = { - serializedName: "Container_setMetadataExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSetMetadataExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + staticWebsite: { + serializedName: "StaticWebsite", + xmlName: "StaticWebsite", type: { - name: "String" - } - } - } - } + name: "Composite", + className: "StaticWebsite", + }, + }, + }, + }, }; -const ContainerGetAccessPolicyHeaders = { - serializedName: "Container_getAccessPolicyHeaders", +const Logging = { + serializedName: "Logging", type: { name: "Composite", - className: "ContainerGetAccessPolicyHeaders", + className: "Logging", modelProperties: { - blobPublicAccess: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - }, - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + version: { + serializedName: "Version", + required: true, + xmlName: "Version", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + deleteProperty: { + serializedName: "Delete", + required: true, + xmlName: "Delete", type: { - name: "String" - } + name: "Boolean", + }, }, - 
version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + read: { + serializedName: "Read", + required: true, + xmlName: "Read", type: { - name: "String" - } + name: "Boolean", + }, }, - date: { - serializedName: "date", - xmlName: "date", + write: { + serializedName: "Write", + required: true, + xmlName: "Write", type: { - name: "DateTimeRfc1123" - } + name: "Boolean", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", type: { - name: "String" - } - } - } - } + name: "Composite", + className: "RetentionPolicy", + }, + }, + }, + }, }; -const ContainerGetAccessPolicyExceptionHeaders = { - serializedName: "Container_getAccessPolicyExceptionHeaders", +const RetentionPolicy = { + serializedName: "RetentionPolicy", type: { name: "Composite", - className: "ContainerGetAccessPolicyExceptionHeaders", + className: "RetentionPolicy", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", type: { - name: "String" - } - } - } - } + name: "Boolean", + }, + }, + days: { + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "Days", + xmlName: "Days", + type: { + name: "Number", + }, + }, + }, + }, }; -const ContainerSetAccessPolicyHeaders = { - serializedName: "Container_setAccessPolicyHeaders", +const Metrics = { + serializedName: "Metrics", type: { name: "Composite", - className: "ContainerSetAccessPolicyHeaders", + className: "Metrics", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + version: { + serializedName: 
"Version", + xmlName: "Version", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", type: { - name: "String" - } + name: "Boolean", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + includeAPIs: { + serializedName: "IncludeAPIs", + xmlName: "IncludeAPIs", type: { - name: "String" - } + name: "Boolean", + }, }, - date: { - serializedName: "date", - xmlName: "date", + retentionPolicy: { + serializedName: "RetentionPolicy", + xmlName: "RetentionPolicy", type: { - name: "DateTimeRfc1123" - } + name: "Composite", + className: "RetentionPolicy", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerSetAccessPolicyExceptionHeaders = { - serializedName: "Container_setAccessPolicyExceptionHeaders", - type: { - name: "Composite", - className: "ContainerSetAccessPolicyExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerRestoreHeaders = { - serializedName: "Container_restoreHeaders", +const CorsRule = { + serializedName: "CorsRule", type: { name: "Composite", - className: "ContainerRestoreHeaders", + className: "CorsRule", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + allowedOrigins: { + serializedName: "AllowedOrigins", + required: true, + xmlName: "AllowedOrigins", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + allowedMethods: { + serializedName: "AllowedMethods", + required: true, + xmlName: "AllowedMethods", type: { - name: "String" - } + name: "String", + }, }, - version: 
{ - serializedName: "x-ms-version", - xmlName: "x-ms-version", + allowedHeaders: { + serializedName: "AllowedHeaders", + required: true, + xmlName: "AllowedHeaders", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + exposedHeaders: { + serializedName: "ExposedHeaders", + required: true, + xmlName: "ExposedHeaders", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + maxAgeInSeconds: { + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "MaxAgeInSeconds", + required: true, + xmlName: "MaxAgeInSeconds", type: { - name: "String" - } - } - } - } + name: "Number", + }, + }, + }, + }, }; -const ContainerRestoreExceptionHeaders = { - serializedName: "Container_restoreExceptionHeaders", +const StaticWebsite = { + serializedName: "StaticWebsite", type: { name: "Composite", - className: "ContainerRestoreExceptionHeaders", + className: "StaticWebsite", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + enabled: { + serializedName: "Enabled", + required: true, + xmlName: "Enabled", type: { - name: "String" - } - } - } - } -}; -const ContainerRenameHeaders = { - serializedName: "Container_renameHeaders", - type: { - name: "Composite", - className: "ContainerRenameHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + name: "Boolean", + }, + }, + indexDocument: { + serializedName: "IndexDocument", + xmlName: "IndexDocument", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + errorDocument404Path: { + serializedName: "ErrorDocument404Path", + xmlName: "ErrorDocument404Path", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: 
"x-ms-version", + defaultIndexDocumentPath: { + serializedName: "DefaultIndexDocumentPath", + xmlName: "DefaultIndexDocumentPath", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + }, + }, +}; +const StorageError = { + serializedName: "StorageError", + type: { + name: "Composite", + className: "StorageError", + modelProperties: { + message: { + serializedName: "Message", + xmlName: "Message", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + code: { + serializedName: "Code", + xmlName: "Code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerRenameExceptionHeaders = { - serializedName: "Container_renameExceptionHeaders", +const BlobServiceStatistics = { + serializedName: "BlobServiceStatistics", + xmlName: "StorageServiceStats", type: { name: "Composite", - className: "ContainerRenameExceptionHeaders", + className: "BlobServiceStatistics", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + geoReplication: { + serializedName: "GeoReplication", + xmlName: "GeoReplication", type: { - name: "String" - } - } - } - } + name: "Composite", + className: "GeoReplication", + }, + }, + }, + }, }; -const ContainerSubmitBatchHeaders = { - serializedName: "Container_submitBatchHeaders", +const GeoReplication = { + serializedName: "GeoReplication", type: { name: "Composite", - className: "ContainerSubmitBatchHeaders", + className: "GeoReplication", modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", + status: { + serializedName: "Status", + required: true, + xmlName: "Status", type: { - name: "String" - } + name: "Enum", + allowedValues: ["live", "bootstrap", "unavailable"], + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + lastSyncOn: { + 
serializedName: "LastSyncTime", + required: true, + xmlName: "LastSyncTime", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerSubmitBatchExceptionHeaders = { - serializedName: "Container_submitBatchExceptionHeaders", +const ListContainersSegmentResponse = { + serializedName: "ListContainersSegmentResponse", + xmlName: "EnumerationResults", type: { name: "Composite", - className: "ContainerSubmitBatchExceptionHeaders", + className: "ListContainersSegmentResponse", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", + type: { + name: "String", + }, + }, + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + containerItems: { + serializedName: "ContainerItems", + required: true, + xmlName: "Containers", + xmlIsWrapped: true, + xmlElementName: "Container", + type: { + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ContainerItem", + }, + }, + }, + }, + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", + type: { + name: "String", + }, + }, + }, + }, }; -const ContainerFilterBlobsHeaders = { - serializedName: "Container_filterBlobsHeaders", +const ContainerItem = { + serializedName: "ContainerItem", + xmlName: "Container", type: { name: "Composite", - className: "ContainerFilterBlobsHeaders", + className: "ContainerItem", modelProperties: { - clientRequestId: { - serializedName: 
"x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + name: { + serializedName: "Name", + required: true, + xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + deleted: { + serializedName: "Deleted", + xmlName: "Deleted", type: { - name: "String" - } + name: "Boolean", + }, }, version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + serializedName: "Version", + xmlName: "Version", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + properties: { + serializedName: "Properties", + xmlName: "Properties", type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ContainerFilterBlobsExceptionHeaders = { - serializedName: "Container_filterBlobsExceptionHeaders", - type: { - name: "Composite", - className: "ContainerFilterBlobsExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: "Composite", + className: "ContainerProperties", + }, + }, + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", type: { - name: "String" - } - } - } - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + }, + }, }; -const ContainerAcquireLeaseHeaders = { - serializedName: "Container_acquireLeaseHeaders", +const ContainerProperties = { + serializedName: "ContainerProperties", type: { name: "Composite", - className: "ContainerAcquireLeaseHeaders", + className: "ContainerProperties", modelProperties: { + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", + type: { + name: "DateTimeRfc1123", + }, + }, etag: { - serializedName: "etag", - xmlName: "etag", + serializedName: "Etag", + required: true, + xmlName: "Etag", type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + 
leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", type: { - name: "DateTimeRfc1123" - } + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + leaseState: { + serializedName: "LeaseState", + xmlName: "LeaseState", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", type: { - name: "String" - } + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + publicAccess: { + serializedName: "PublicAccess", + xmlName: "PublicAccess", type: { - name: "String" - } + name: "Enum", + allowedValues: ["container", "blob"], + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + hasImmutabilityPolicy: { + serializedName: "HasImmutabilityPolicy", + xmlName: "HasImmutabilityPolicy", type: { - name: "String" - } + name: "Boolean", + }, }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ContainerAcquireLeaseExceptionHeaders = { - serializedName: "Container_acquireLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerAcquireLeaseExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerReleaseLeaseHeaders = { - serializedName: "Container_releaseLeaseHeaders", - type: { - name: "Composite", - className: "ContainerReleaseLeaseHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + hasLegalHold: { + serializedName: "HasLegalHold", + xmlName: 
"HasLegalHold", type: { - name: "String" - } + name: "Boolean", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + defaultEncryptionScope: { + serializedName: "DefaultEncryptionScope", + xmlName: "DefaultEncryptionScope", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + preventEncryptionScopeOverride: { + serializedName: "DenyEncryptionScopeOverride", + xmlName: "DenyEncryptionScopeOverride", type: { - name: "String" - } + name: "Boolean", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", type: { - name: "String" - } + name: "Number", + }, }, - date: { - serializedName: "date", - xmlName: "date", + isImmutableStorageWithVersioningEnabled: { + serializedName: "ImmutableStorageWithVersioningEnabled", + xmlName: "ImmutableStorageWithVersioningEnabled", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "Boolean", + }, + }, + }, + }, }; -const ContainerReleaseLeaseExceptionHeaders = { - serializedName: "Container_releaseLeaseExceptionHeaders", +const KeyInfo = { + serializedName: "KeyInfo", type: { name: "Composite", - className: "ContainerReleaseLeaseExceptionHeaders", + className: "KeyInfo", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + startsOn: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + expiresOn: { + serializedName: "Expiry", + required: true, + xmlName: "Expiry", + type: { + name: "String", + }, + 
}, + }, + }, }; -const ContainerRenewLeaseHeaders = { - serializedName: "Container_renewLeaseHeaders", +const UserDelegationKey = { + serializedName: "UserDelegationKey", type: { name: "Composite", - className: "ContainerRenewLeaseHeaders", + className: "UserDelegationKey", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + signedObjectId: { + serializedName: "SignedOid", + required: true, + xmlName: "SignedOid", type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + signedTenantId: { + serializedName: "SignedTid", + required: true, + xmlName: "SignedTid", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + signedStartsOn: { + serializedName: "SignedStart", + required: true, + xmlName: "SignedStart", type: { - name: "String" - } + name: "String", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + signedExpiresOn: { + serializedName: "SignedExpiry", + required: true, + xmlName: "SignedExpiry", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + signedService: { + serializedName: "SignedService", + required: true, + xmlName: "SignedService", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + signedVersion: { + serializedName: "SignedVersion", + required: true, + xmlName: "SignedVersion", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - } - } - } -}; -const ContainerRenewLeaseExceptionHeaders = { - serializedName: "Container_renewLeaseExceptionHeaders", - type: { - name: "Composite", - className: "ContainerRenewLeaseExceptionHeaders", - 
modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + value: { + serializedName: "Value", + required: true, + xmlName: "Value", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerBreakLeaseHeaders = { - serializedName: "Container_breakLeaseHeaders", +const FilterBlobSegment = { + serializedName: "FilterBlobSegment", + xmlName: "EnumerationResults", type: { name: "Composite", - className: "ContainerBreakLeaseHeaders", + className: "FilterBlobSegment", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + where: { + serializedName: "Where", + required: true, + xmlName: "Where", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - leaseTime: { - serializedName: "x-ms-lease-time", - xmlName: "x-ms-lease-time", + blobs: { + serializedName: "Blobs", + required: true, + xmlName: "Blobs", + xmlIsWrapped: true, + xmlElementName: "Blob", type: { - name: "Number" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "FilterBlobItem", + }, + }, + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + }, + }, +}; +const FilterBlobItem = { + serializedName: "FilterBlobItem", + xmlName: "Blob", + type: { + name: "Composite", + className: "FilterBlobItem", + modelProperties: { + name: { + serializedName: "Name", + required: true, + xmlName: "Name", type: { - name: "String" - } + name: "String", + 
}, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + tags: { + serializedName: "Tags", + xmlName: "Tags", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "Composite", + className: "BlobTags", + }, + }, + }, + }, }; -const ContainerBreakLeaseExceptionHeaders = { - serializedName: "Container_breakLeaseExceptionHeaders", +const BlobTags = { + serializedName: "BlobTags", + xmlName: "Tags", type: { name: "Composite", - className: "ContainerBreakLeaseExceptionHeaders", + className: "BlobTags", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + blobTagSet: { + serializedName: "BlobTagSet", + required: true, + xmlName: "TagSet", + xmlIsWrapped: true, + xmlElementName: "Tag", type: { - name: "String" - } - } - } - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobTag", + }, + }, + }, + }, + }, + }, }; -const ContainerChangeLeaseHeaders = { - serializedName: "Container_changeLeaseHeaders", +const BlobTag = { + serializedName: "BlobTag", + xmlName: "Tag", type: { name: "Composite", - className: "ContainerChangeLeaseHeaders", + className: "BlobTag", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + key: { + serializedName: "Key", + required: true, + xmlName: "Key", type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + value: { + serializedName: "Value", + required: true, + xmlName: "Value", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + }, + }, +}; +const SignedIdentifier = { + serializedName: "SignedIdentifier", + xmlName: "SignedIdentifier", + 
type: { + name: "Composite", + className: "SignedIdentifier", + modelProperties: { + id: { + serializedName: "Id", + required: true, + xmlName: "Id", type: { - name: "String" - } + name: "String", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + accessPolicy: { + serializedName: "AccessPolicy", + xmlName: "AccessPolicy", type: { - name: "String" - } + name: "Composite", + className: "AccessPolicy", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + }, + }, +}; +const AccessPolicy = { + serializedName: "AccessPolicy", + type: { + name: "Composite", + className: "AccessPolicy", + modelProperties: { + startsOn: { + serializedName: "Start", + xmlName: "Start", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + expiresOn: { + serializedName: "Expiry", + xmlName: "Expiry", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + permissions: { + serializedName: "Permission", + xmlName: "Permission", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const ContainerChangeLeaseExceptionHeaders = { - serializedName: "Container_changeLeaseExceptionHeaders", +const ListBlobsFlatSegmentResponse = { + serializedName: "ListBlobsFlatSegmentResponse", + xmlName: "EnumerationResults", type: { name: "Composite", - className: "ContainerChangeLeaseExceptionHeaders", + className: "ListBlobsFlatSegmentResponse", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "String" - } - } - } - } -}; -const ContainerListBlobFlatSegmentHeaders = { - serializedName: "Container_listBlobFlatSegmentHeaders", - type: { - name: 
"Composite", - className: "ContainerListBlobFlatSegmentHeaders", - modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", + name: "String", + }, + }, + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, type: { - name: "String" - } + name: "String", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + marker: { + serializedName: "Marker", + xmlName: "Marker", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + segment: { + serializedName: "Segment", + xmlName: "Blobs", type: { - name: "String" - } + name: "Composite", + className: "BlobFlatListSegment", + }, }, - date: { - serializedName: "date", - xmlName: "date", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const ContainerListBlobFlatSegmentExceptionHeaders = { - serializedName: "Container_listBlobFlatSegmentExceptionHeaders", +const BlobFlatListSegment = { + serializedName: "BlobFlatListSegment", + xmlName: "Blobs", type: { name: "Composite", - className: "ContainerListBlobFlatSegmentExceptionHeaders", + className: "BlobFlatListSegment", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + 
xmlElementName: "Blob", type: { - name: "String" - } - } - } - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, + }, + }, + }, + }, }; -const ContainerListBlobHierarchySegmentHeaders = { - serializedName: "Container_listBlobHierarchySegmentHeaders", +const BlobItemInternal = { + serializedName: "BlobItemInternal", + xmlName: "Blob", type: { name: "Composite", - className: "ContainerListBlobHierarchySegmentHeaders", + className: "BlobItemInternal", modelProperties: { - contentType: { - serializedName: "content-type", - xmlName: "content-type", + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "String" - } + name: "Composite", + className: "BlobName", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + deleted: { + serializedName: "Deleted", + required: true, + xmlName: "Deleted", type: { - name: "String" - } + name: "Boolean", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + snapshot: { + serializedName: "Snapshot", + required: true, + xmlName: "Snapshot", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + versionId: { + serializedName: "VersionId", + xmlName: "VersionId", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + isCurrentVersion: { + serializedName: "IsCurrentVersion", + xmlName: "IsCurrentVersion", type: { - name: "DateTimeRfc1123" - } + name: "Boolean", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerListBlobHierarchySegmentExceptionHeaders = { - serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", - type: { - name: "Composite", - className: "ContainerListBlobHierarchySegmentExceptionHeaders", - modelProperties: { 
- errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const ContainerGetAccountInfoHeaders = { - serializedName: "Container_getAccountInfoHeaders", - type: { - name: "Composite", - className: "ContainerGetAccountInfoHeaders", - modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + properties: { + serializedName: "Properties", + xmlName: "Properties", type: { - name: "String" - } + name: "Composite", + className: "BlobPropertiesInternal", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + metadata: { + serializedName: "Metadata", + xmlName: "Metadata", type: { - name: "String" - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + blobTags: { + serializedName: "BlobTags", + xmlName: "Tags", type: { - name: "String" - } + name: "Composite", + className: "BlobTags", + }, }, - date: { - serializedName: "date", - xmlName: "date", + objectReplicationMetadata: { + serializedName: "ObjectReplicationMetadata", + xmlName: "OrMetadata", type: { - name: "DateTimeRfc1123" - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", + hasVersionsOnly: { + serializedName: "HasVersionsOnly", + xmlName: "HasVersionsOnly", type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } + name: "Boolean", + }, }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", - type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + }, + }, }; -const ContainerGetAccountInfoExceptionHeaders = { - serializedName: 
"Container_getAccountInfoExceptionHeaders", +const BlobName = { + serializedName: "BlobName", type: { name: "Composite", - className: "ContainerGetAccountInfoExceptionHeaders", + className: "BlobName", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, type: { - name: "String" - } - } - } - } + name: "Boolean", + }, + }, + content: { + serializedName: "content", + xmlName: "content", + xmlIsMsText: true, + type: { + name: "String", + }, + }, + }, + }, }; -const BlobDownloadHeaders = { - serializedName: "Blob_downloadHeaders", +const BlobPropertiesInternal = { + serializedName: "BlobPropertiesInternal", + xmlName: "Properties", type: { name: "Composite", - className: "BlobDownloadHeaders", + className: "BlobPropertiesInternal", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, createdOn: { - serializedName: "x-ms-creation-time", - xmlName: "x-ms-creation-time", - type: { - name: "DateTimeRfc1123" - } - }, - metadata: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", + serializedName: "Creation-Time", + xmlName: "Creation-Time", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "DateTimeRfc1123", }, - headerCollectionPrefix: "x-ms-meta-" }, - objectReplicationPolicyId: { - serializedName: "x-ms-or-policy-id", - xmlName: "x-ms-or-policy-id", + lastModified: { + serializedName: "Last-Modified", + required: true, + xmlName: "Last-Modified", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - objectReplicationRules: { - serializedName: "x-ms-or", - xmlName: "x-ms-or", + etag: { + serializedName: "Etag", + required: true, + xmlName: "Etag", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "String", }, - headerCollectionPrefix: "x-ms-or-" }, contentLength: { - 
serializedName: "content-length", - xmlName: "content-length", + serializedName: "Content-Length", + xmlName: "Content-Length", type: { - name: "Number" - } + name: "Number", + }, }, contentType: { - serializedName: "content-type", - xmlName: "content-type", + serializedName: "Content-Type", + xmlName: "Content-Type", type: { - name: "String" - } + name: "String", + }, }, - contentRange: { - serializedName: "content-range", - xmlName: "content-range", + contentEncoding: { + serializedName: "Content-Encoding", + xmlName: "Content-Encoding", type: { - name: "String" - } + name: "String", + }, }, - etag: { - serializedName: "etag", - xmlName: "etag", + contentLanguage: { + serializedName: "Content-Language", + xmlName: "Content-Language", type: { - name: "String" - } + name: "String", + }, }, contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", - type: { - name: "String" - } - }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + serializedName: "Content-MD5", + xmlName: "Content-MD5", type: { - name: "String" - } + name: "ByteArray", + }, }, contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + serializedName: "Content-Disposition", + xmlName: "Content-Disposition", type: { - name: "String" - } + name: "String", + }, }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + cacheControl: { + serializedName: "Cache-Control", + xmlName: "Cache-Control", type: { - name: "String" - } + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", - type: { - name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", 
"AppendBlob"] - } - }, - copyCompletedOn: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", - type: { - name: "DateTimeRfc1123" - } - }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", - type: { - name: "String" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", - type: { - name: "String" - } - }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + serializedName: "BlobType", + xmlName: "BlobType", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", + leaseStatus: { + serializedName: "LeaseStatus", + xmlName: "LeaseStatus", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["locked", "unlocked"], + }, }, leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", + serializedName: "LeaseState", + xmlName: "LeaseState", type: { name: "Enum", allowedValues: [ @@ -3989,11207 +3299,11873 @@ const BlobDownloadHeaders = { "leased", "expired", "breaking", - "broken" - ] - } + "broken", + ], + }, }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", + leaseDuration: { + serializedName: "LeaseDuration", + xmlName: "LeaseDuration", type: { name: "Enum", - allowedValues: ["locked", "unlocked"] - } - }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - }, - requestId: { - serializedName: 
"x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } - }, - isCurrentVersion: { - serializedName: "x-ms-is-current-version", - xmlName: "x-ms-is-current-version", - type: { - name: "Boolean" - } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - }, - blobContentMD5: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", - type: { - name: "ByteArray" - } - }, - tagCount: { - serializedName: "x-ms-tag-count", - xmlName: "x-ms-tag-count", - type: { - name: "Number" - } + allowedValues: ["infinite", "fixed"], + }, }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", + copyId: { + serializedName: "CopyId", + xmlName: "CopyId", type: { - name: "Boolean" - } + name: "String", + }, }, - lastAccessed: { - serializedName: "x-ms-last-access-time", - xmlName: "x-ms-last-access-time", + copyStatus: { + serializedName: "CopyStatus", + xmlName: "CopyStatus", type: { - name: "DateTimeRfc1123" - } + name: "Enum", + allowedValues: 
["pending", "success", "aborted", "failed"], + }, }, - immutabilityPolicyExpiresOn: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", + copySource: { + serializedName: "CopySource", + xmlName: "CopySource", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", + copyProgress: { + serializedName: "CopyProgress", + xmlName: "CopyProgress", type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + name: "String", + }, }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + copyCompletedOn: { + serializedName: "CopyCompletionTime", + xmlName: "CopyCompletionTime", type: { - name: "Boolean" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + copyStatusDescription: { + serializedName: "CopyStatusDescription", + xmlName: "CopyStatusDescription", type: { - name: "String" - } + name: "String", + }, }, - contentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + serverEncrypted: { + serializedName: "ServerEncrypted", + xmlName: "ServerEncrypted", type: { - name: "ByteArray" - } - } - } - } -}; -const BlobDownloadExceptionHeaders = { - serializedName: "Blob_downloadExceptionHeaders", - type: { - name: "Composite", - className: "BlobDownloadExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: "Boolean", + }, + }, + incrementalCopy: { + serializedName: "IncrementalCopy", + xmlName: "IncrementalCopy", type: { - name: "String" - } - } - } - } -}; -const BlobGetPropertiesHeaders = { - serializedName: "Blob_getPropertiesHeaders", - type: { - name: "Composite", - className: "BlobGetPropertiesHeaders", - modelProperties: { - lastModified: { - serializedName: 
"last-modified", - xmlName: "last-modified", + name: "Boolean", + }, + }, + destinationSnapshot: { + serializedName: "DestinationSnapshot", + xmlName: "DestinationSnapshot", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - createdOn: { - serializedName: "x-ms-creation-time", - xmlName: "x-ms-creation-time", + deletedOn: { + serializedName: "DeletedTime", + xmlName: "DeletedTime", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - metadata: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", + remainingRetentionDays: { + serializedName: "RemainingRetentionDays", + xmlName: "RemainingRetentionDays", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "Number", }, - headerCollectionPrefix: "x-ms-meta-" }, - objectReplicationPolicyId: { - serializedName: "x-ms-or-policy-id", - xmlName: "x-ms-or-policy-id", + accessTier: { + serializedName: "AccessTier", + xmlName: "AccessTier", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, }, - objectReplicationRules: { - serializedName: "x-ms-or", - xmlName: "x-ms-or", + accessTierInferred: { + serializedName: "AccessTierInferred", + xmlName: "AccessTierInferred", type: { - name: "Dictionary", - value: { type: { name: "String" } } + name: "Boolean", }, - headerCollectionPrefix: "x-ms-or-" }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", + archiveStatus: { + serializedName: "ArchiveStatus", + xmlName: "ArchiveStatus", type: { name: "Enum", - allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"] - } + allowedValues: [ + "rehydrate-pending-to-hot", + "rehydrate-pending-to-cool", + "rehydrate-pending-to-cold", + ], + }, }, - copyCompletedOn: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", + customerProvidedKeySha256: { + 
serializedName: "CustomerProvidedKeySha256", + xmlName: "CustomerProvidedKeySha256", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", + encryptionScope: { + serializedName: "EncryptionScope", + xmlName: "EncryptionScope", type: { - name: "String" - } + name: "String", + }, }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + accessTierChangedOn: { + serializedName: "AccessTierChangeTime", + xmlName: "AccessTierChangeTime", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", + tagCount: { + serializedName: "TagCount", + xmlName: "TagCount", type: { - name: "String" - } + name: "Number", + }, }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", + expiresOn: { + serializedName: "Expiry-Time", + xmlName: "Expiry-Time", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + isSealed: { + serializedName: "Sealed", + xmlName: "Sealed", + type: { + name: "Boolean", + }, + }, + rehydratePriority: { + serializedName: "RehydratePriority", + xmlName: "RehydratePriority", type: { name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + allowedValues: ["High", "Standard"], + }, }, - isIncrementalCopy: { - serializedName: "x-ms-incremental-copy", - xmlName: "x-ms-incremental-copy", + lastAccessedOn: { + serializedName: "LastAccessTime", + xmlName: "LastAccessTime", type: { - name: "Boolean" - } + name: "DateTimeRfc1123", + }, }, - destinationSnapshot: { - serializedName: "x-ms-copy-destination-snapshot", - xmlName: "x-ms-copy-destination-snapshot", + immutabilityPolicyExpiresOn: { + serializedName: "ImmutabilityPolicyUntilDate", + xmlName: "ImmutabilityPolicyUntilDate", 
type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", + immutabilityPolicyMode: { + serializedName: "ImmutabilityPolicyMode", + xmlName: "ImmutabilityPolicyMode", type: { name: "Enum", - allowedValues: ["infinite", "fixed"] - } + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", + legalHold: { + serializedName: "LegalHold", + xmlName: "LegalHold", type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } + name: "Boolean", + }, }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", + }, + }, +}; +const ListBlobsHierarchySegmentResponse = { + serializedName: "ListBlobsHierarchySegmentResponse", + xmlName: "EnumerationResults", + type: { + name: "Composite", + className: "ListBlobsHierarchySegmentResponse", + modelProperties: { + serviceEndpoint: { + serializedName: "ServiceEndpoint", + required: true, + xmlName: "ServiceEndpoint", + xmlIsAttribute: true, type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } + name: "String", + }, }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", + containerName: { + serializedName: "ContainerName", + required: true, + xmlName: "ContainerName", + xmlIsAttribute: true, type: { - name: "Number" - } + name: "String", + }, }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", + prefix: { + serializedName: "Prefix", + xmlName: "Prefix", type: { - name: "String" - } + name: "String", + }, }, - etag: { - serializedName: "etag", - xmlName: "etag", + marker: { + serializedName: "Marker", + xmlName: "Marker", + type: { + name: "String", + }, + }, + maxPageSize: { + serializedName: "MaxResults", + xmlName: "MaxResults", + type: { + name: "Number", + }, + }, + delimiter: { + 
serializedName: "Delimiter", + xmlName: "Delimiter", type: { - name: "String" - } + name: "String", + }, }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + segment: { + serializedName: "Segment", + xmlName: "Blobs", type: { - name: "ByteArray" - } + name: "Composite", + className: "BlobHierarchyListSegment", + }, }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { - name: "String" - } + name: "String", + }, }, - contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + }, + }, +}; +const BlobHierarchyListSegment = { + serializedName: "BlobHierarchyListSegment", + xmlName: "Blobs", + type: { + name: "Composite", + className: "BlobHierarchyListSegment", + modelProperties: { + blobPrefixes: { + serializedName: "BlobPrefixes", + xmlName: "BlobPrefixes", + xmlElementName: "BlobPrefix", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobPrefix", + }, + }, + }, }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + blobItems: { + serializedName: "BlobItems", + required: true, + xmlName: "BlobItems", + xmlElementName: "Blob", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "BlobItemInternal", + }, + }, + }, }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + }, + }, +}; +const BlobPrefix = { + serializedName: "BlobPrefix", + type: { + name: "Composite", + className: "BlobPrefix", + modelProperties: { + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "String" - } + name: "Composite", + className: "BlobName", + }, }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + }, + }, +}; +const BlockLookupList = { + 
serializedName: "BlockLookupList", + xmlName: "BlockList", + type: { + name: "Composite", + className: "BlockLookupList", + modelProperties: { + committed: { + serializedName: "Committed", + xmlName: "Committed", + xmlElementName: "Committed", type: { - name: "Number" - } + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + uncommitted: { + serializedName: "Uncommitted", + xmlName: "Uncommitted", + xmlElementName: "Uncommitted", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + latest: { + serializedName: "Latest", + xmlName: "Latest", + xmlElementName: "Latest", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "String", + }, + }, + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + }, + }, +}; +const BlockList = { + serializedName: "BlockList", + type: { + name: "Composite", + className: "BlockList", + modelProperties: { + committedBlocks: { + serializedName: "CommittedBlocks", + xmlName: "CommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, }, - date: { - serializedName: "date", - xmlName: "date", + uncommittedBlocks: { + serializedName: "UncommittedBlocks", + xmlName: "UncommittedBlocks", + xmlIsWrapped: true, + xmlElementName: "Block", type: { - name: "DateTimeRfc1123" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "Block", + }, + }, + }, }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", + }, + }, +}; +const Block = { + serializedName: "Block", + type: { + name: "Composite", + className: "Block", + modelProperties: { + name: 
{ + serializedName: "Name", + required: true, + xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + size: { + serializedName: "Size", + required: true, + xmlName: "Size", type: { - name: "Number" - } + name: "Number", + }, }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: "x-ms-server-encrypted", + }, + }, +}; +const PageList = { + serializedName: "PageList", + type: { + name: "Composite", + className: "PageList", + modelProperties: { + pageRange: { + serializedName: "PageRange", + xmlName: "PageRange", + xmlElementName: "PageRange", type: { - name: "Boolean" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "PageRange", + }, + }, + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + clearRange: { + serializedName: "ClearRange", + xmlName: "ClearRange", + xmlElementName: "ClearRange", type: { - name: "String" - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ClearRange", + }, + }, + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + continuationToken: { + serializedName: "NextMarker", + xmlName: "NextMarker", type: { - name: "String" - } + name: "String", + }, }, - accessTier: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", + }, + }, +}; +const PageRange = { + serializedName: "PageRange", + xmlName: "PageRange", + type: { + name: "Composite", + className: "PageRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { - name: "String" - } + name: "Number", + }, }, - accessTierInferred: { - serializedName: "x-ms-access-tier-inferred", - xmlName: "x-ms-access-tier-inferred", + end: { + serializedName: "End", + required: true, + 
xmlName: "End", type: { - name: "Boolean" - } + name: "Number", + }, }, - archiveStatus: { - serializedName: "x-ms-archive-status", - xmlName: "x-ms-archive-status", + }, + }, +}; +const ClearRange = { + serializedName: "ClearRange", + xmlName: "ClearRange", + type: { + name: "Composite", + className: "ClearRange", + modelProperties: { + start: { + serializedName: "Start", + required: true, + xmlName: "Start", type: { - name: "String" - } + name: "Number", + }, }, - accessTierChangedOn: { - serializedName: "x-ms-access-tier-change-time", - xmlName: "x-ms-access-tier-change-time", + end: { + serializedName: "End", + required: true, + xmlName: "End", type: { - name: "DateTimeRfc1123" - } + name: "Number", + }, }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", + }, + }, +}; +const QueryRequest = { + serializedName: "QueryRequest", + xmlName: "QueryRequest", + type: { + name: "Composite", + className: "QueryRequest", + modelProperties: { + queryType: { + serializedName: "QueryType", + required: true, + xmlName: "QueryType", type: { - name: "String" - } + name: "String", + }, }, - isCurrentVersion: { - serializedName: "x-ms-is-current-version", - xmlName: "x-ms-is-current-version", + expression: { + serializedName: "Expression", + required: true, + xmlName: "Expression", type: { - name: "Boolean" - } + name: "String", + }, }, - tagCount: { - serializedName: "x-ms-tag-count", - xmlName: "x-ms-tag-count", + inputSerialization: { + serializedName: "InputSerialization", + xmlName: "InputSerialization", type: { - name: "Number" - } + name: "Composite", + className: "QuerySerialization", + }, }, - expiresOn: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", + outputSerialization: { + serializedName: "OutputSerialization", + xmlName: "OutputSerialization", type: { - name: "DateTimeRfc1123" - } + name: "Composite", + className: "QuerySerialization", + }, }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: 
"x-ms-blob-sealed", + }, + }, +}; +const QuerySerialization = { + serializedName: "QuerySerialization", + type: { + name: "Composite", + className: "QuerySerialization", + modelProperties: { + format: { + serializedName: "Format", + xmlName: "Format", type: { - name: "Boolean" - } + name: "Composite", + className: "QueryFormat", + }, }, - rehydratePriority: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", + }, + }, +}; +const QueryFormat = { + serializedName: "QueryFormat", + type: { + name: "Composite", + className: "QueryFormat", + modelProperties: { + type: { + serializedName: "Type", + required: true, + xmlName: "Type", type: { name: "Enum", - allowedValues: ["High", "Standard"] - } + allowedValues: ["delimited", "json", "arrow", "parquet"], + }, }, - lastAccessed: { - serializedName: "x-ms-last-access-time", - xmlName: "x-ms-last-access-time", + delimitedTextConfiguration: { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", type: { - name: "DateTimeRfc1123" - } + name: "Composite", + className: "DelimitedTextConfiguration", + }, }, - immutabilityPolicyExpiresOn: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", + jsonTextConfiguration: { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", type: { - name: "DateTimeRfc1123" - } + name: "Composite", + className: "JsonTextConfiguration", + }, }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", + arrowConfiguration: { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } + name: "Composite", + className: "ArrowConfiguration", + }, }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + parquetTextConfiguration: { + serializedName: "ParquetTextConfiguration", 
+ xmlName: "ParquetTextConfiguration", type: { - name: "Boolean" - } + name: "Dictionary", + value: { type: { name: "any" } }, + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const BlobGetPropertiesExceptionHeaders = { - serializedName: "Blob_getPropertiesExceptionHeaders", - type: { - name: "Composite", - className: "BlobGetPropertiesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlobDeleteHeaders = { - serializedName: "Blob_deleteHeaders", +const DelimitedTextConfiguration = { + serializedName: "DelimitedTextConfiguration", + xmlName: "DelimitedTextConfiguration", type: { name: "Composite", - className: "BlobDeleteHeaders", + className: "DelimitedTextConfiguration", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + columnSeparator: { + serializedName: "ColumnSeparator", + xmlName: "ColumnSeparator", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + fieldQuote: { + serializedName: "FieldQuote", + xmlName: "FieldQuote", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + escapeChar: { + serializedName: "EscapeChar", + xmlName: "EscapeChar", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + headersPresent: { + serializedName: "HeadersPresent", + xmlName: "HasHeaders", type: { - name: "String" - } - } - } - } + name: 
"Boolean", + }, + }, + }, + }, +}; +const JsonTextConfiguration = { + serializedName: "JsonTextConfiguration", + xmlName: "JsonTextConfiguration", + type: { + name: "Composite", + className: "JsonTextConfiguration", + modelProperties: { + recordSeparator: { + serializedName: "RecordSeparator", + xmlName: "RecordSeparator", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobDeleteExceptionHeaders = { - serializedName: "Blob_deleteExceptionHeaders", +const ArrowConfiguration = { + serializedName: "ArrowConfiguration", + xmlName: "ArrowConfiguration", type: { name: "Composite", - className: "BlobDeleteExceptionHeaders", + className: "ArrowConfiguration", modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + schema: { + serializedName: "Schema", + required: true, + xmlName: "Schema", + xmlIsWrapped: true, + xmlElementName: "Field", type: { - name: "String" - } - } - } - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "ArrowField", + }, + }, + }, + }, + }, + }, }; -const BlobUndeleteHeaders = { - serializedName: "Blob_undeleteHeaders", +const ArrowField = { + serializedName: "ArrowField", + xmlName: "Field", type: { name: "Composite", - className: "BlobUndeleteHeaders", + className: "ArrowField", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + type: { + serializedName: "Type", + required: true, + xmlName: "Type", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + name: { + serializedName: "Name", + xmlName: "Name", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + precision: { + serializedName: "Precision", + xmlName: "Precision", type: { - name: "String" - } + name: "Number", + }, }, - date: { - serializedName: "date", - xmlName: "date", + 
scale: { + serializedName: "Scale", + xmlName: "Scale", type: { - name: "DateTimeRfc1123" - } + name: "Number", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const BlobUndeleteExceptionHeaders = { - serializedName: "Blob_undeleteExceptionHeaders", - type: { - name: "Composite", - className: "BlobUndeleteExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlobSetExpiryHeaders = { - serializedName: "Blob_setExpiryHeaders", +const ServiceSetPropertiesHeaders = { + serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", - className: "BlobSetExpiryHeaders", + className: "ServiceSetPropertiesHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetExpiryExceptionHeaders = { - serializedName: "Blob_setExpiryExceptionHeaders", +const ServiceSetPropertiesExceptionHeaders = { + serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", - className: "BlobSetExpiryExceptionHeaders", + 
className: "ServiceSetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetHttpHeadersHeaders = { - serializedName: "Blob_setHttpHeadersHeaders", +const ServiceGetPropertiesHeaders = { + serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", - className: "BlobSetHttpHeadersHeaders", + className: "ServiceGetPropertiesHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetHttpHeadersExceptionHeaders = { - serializedName: "Blob_setHttpHeadersExceptionHeaders", +const ServiceGetPropertiesExceptionHeaders = { + serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", - className: "BlobSetHttpHeadersExceptionHeaders", + className: "ServiceGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", 
type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetImmutabilityPolicyHeaders = { - serializedName: "Blob_setImmutabilityPolicyHeaders", +const ServiceGetStatisticsHeaders = { + serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", - className: "BlobSetImmutabilityPolicyHeaders", + className: "ServiceGetStatisticsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - immutabilityPolicyExpiry: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - immutabilityPolicyMode: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } - } - } + }, + }, }; -const BlobSetImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", +const ServiceGetStatisticsExceptionHeaders = { + serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", - className: "BlobSetImmutabilityPolicyExceptionHeaders", + className: "ServiceGetStatisticsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + type: { + name: "String", + }, + }, + }, + }, }; 
-const BlobDeleteImmutabilityPolicyHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyHeaders", +const ServiceListContainersSegmentHeaders = { + serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", - className: "BlobDeleteImmutabilityPolicyHeaders", + className: "ServiceListContainersSegmentHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobDeleteImmutabilityPolicyExceptionHeaders = { - serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", +const ServiceListContainersSegmentExceptionHeaders = { + serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", - className: "BlobDeleteImmutabilityPolicyExceptionHeaders", + className: "ServiceListContainersSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetLegalHoldHeaders = { - serializedName: "Blob_setLegalHoldHeaders", +const ServiceGetUserDelegationKeyHeaders = { + serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", - className: "BlobSetLegalHoldHeaders", + className: "ServiceGetUserDelegationKeyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: 
"String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - legalHold: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Boolean" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetLegalHoldExceptionHeaders = { - serializedName: "Blob_setLegalHoldExceptionHeaders", +const ServiceGetUserDelegationKeyExceptionHeaders = { + serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", - className: "BlobSetLegalHoldExceptionHeaders", + className: "ServiceGetUserDelegationKeyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetMetadataHeaders = { - serializedName: "Blob_setMetadataHeaders", +const ServiceGetAccountInfoHeaders = { + serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", - className: "BlobSetMetadataHeaders", + className: "ServiceGetAccountInfoHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, 
version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", type: { - name: "Boolean" - } + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + isHierarchicalNamespaceEnabled: { + serializedName: "x-ms-is-hns-enabled", + xmlName: "x-ms-is-hns-enabled", type: { - name: "String" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetMetadataExceptionHeaders = { - serializedName: "Blob_setMetadataExceptionHeaders", +const ServiceGetAccountInfoExceptionHeaders = { + serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", - className: "BlobSetMetadataExceptionHeaders", + className: "ServiceGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: 
"String", + }, + }, + }, + }, }; -const BlobAcquireLeaseHeaders = { - serializedName: "Blob_acquireLeaseHeaders", +const ServiceSubmitBatchHeaders = { + serializedName: "Service_submitBatchHeaders", type: { name: "Composite", - className: "BlobAcquireLeaseHeaders", + className: "ServiceSubmitBatchHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceSubmitBatchExceptionHeaders = { + serializedName: "Service_submitBatchExceptionHeaders", + type: { + name: "Composite", + className: "ServiceSubmitBatchExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const ServiceFilterBlobsHeaders = { + serializedName: "Service_filterBlobsHeaders", + type: { + name: "Composite", + className: "ServiceFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + 
name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobAcquireLeaseExceptionHeaders = { - serializedName: "Blob_acquireLeaseExceptionHeaders", +const ServiceFilterBlobsExceptionHeaders = { + serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", - className: "BlobAcquireLeaseExceptionHeaders", + className: "ServiceFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobReleaseLeaseHeaders = { - serializedName: "Blob_releaseLeaseHeaders", +const ContainerCreateHeaders = { + serializedName: "Container_createHeaders", type: { name: "Composite", - className: "BlobReleaseLeaseHeaders", + className: "ContainerCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, 
+ errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobReleaseLeaseExceptionHeaders = { - serializedName: "Blob_releaseLeaseExceptionHeaders", +const ContainerCreateExceptionHeaders = { + serializedName: "Container_createExceptionHeaders", type: { name: "Composite", - className: "BlobReleaseLeaseExceptionHeaders", + className: "ContainerCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobRenewLeaseHeaders = { - serializedName: "Blob_renewLeaseHeaders", +const ContainerGetPropertiesHeaders = { + serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", - className: "BlobRenewLeaseHeaders", + className: "ContainerGetPropertiesHeaders", modelProperties: { + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", type: { - name: "String" - } + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", 
"unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", + type: { + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, + hasImmutabilityPolicy: { + serializedName: "x-ms-has-immutability-policy", + xmlName: "x-ms-has-immutability-policy", + type: { + name: "Boolean", + }, + }, + hasLegalHold: { + serializedName: "x-ms-has-legal-hold", + xmlName: "x-ms-has-legal-hold", + type: { + name: "Boolean", + }, + }, + defaultEncryptionScope: { + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", + type: { + name: "String", + }, + }, + denyEncryptionScopeOverride: { + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", + type: { + name: "Boolean", + }, + }, + isImmutableStorageWithVersioningEnabled: { + serializedName: "x-ms-immutable-storage-with-versioning-enabled", + xmlName: "x-ms-immutable-storage-with-versioning-enabled", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobRenewLeaseExceptionHeaders = { - serializedName: "Blob_renewLeaseExceptionHeaders", +const ContainerGetPropertiesExceptionHeaders = { + serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", - className: "BlobRenewLeaseExceptionHeaders", 
+ className: "ContainerGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobChangeLeaseHeaders = { - serializedName: "Blob_changeLeaseHeaders", +const ContainerDeleteHeaders = { + serializedName: "Container_deleteHeaders", type: { name: "Composite", - className: "BlobChangeLeaseHeaders", + className: "ContainerDeleteHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } - }, - leaseId: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobChangeLeaseExceptionHeaders = { - serializedName: "Blob_changeLeaseExceptionHeaders", +const ContainerDeleteExceptionHeaders = { + serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", - className: "BlobChangeLeaseExceptionHeaders", + className: "ContainerDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + 
}, + }, }; -const BlobBreakLeaseHeaders = { - serializedName: "Blob_breakLeaseHeaders", +const ContainerSetMetadataHeaders = { + serializedName: "Container_setMetadataHeaders", type: { name: "Composite", - className: "BlobBreakLeaseHeaders", + className: "ContainerSetMetadataHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - leaseTime: { - serializedName: "x-ms-lease-time", - xmlName: "x-ms-lease-time", + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Number" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - } - } - } + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const BlobBreakLeaseExceptionHeaders = { - serializedName: "Blob_breakLeaseExceptionHeaders", +const ContainerSetMetadataExceptionHeaders = { + serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", - className: "BlobBreakLeaseExceptionHeaders", + className: "ContainerSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobCreateSnapshotHeaders = { - 
serializedName: "Blob_createSnapshotHeaders", +const ContainerGetAccessPolicyHeaders = { + serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", - className: "BlobCreateSnapshotHeaders", + className: "ContainerGetAccessPolicyHeaders", modelProperties: { - snapshot: { - serializedName: "x-ms-snapshot", - xmlName: "x-ms-snapshot", + blobPublicAccess: { + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", type: { - name: "String" - } + name: "Enum", + allowedValues: ["container", "blob"], + }, }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobCreateSnapshotExceptionHeaders = { - serializedName: "Blob_createSnapshotExceptionHeaders", +const ContainerGetAccessPolicyExceptionHeaders = { + serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: 
"Composite", - className: "BlobCreateSnapshotExceptionHeaders", + className: "ContainerGetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobStartCopyFromURLHeaders = { - serializedName: "Blob_startCopyFromURLHeaders", +const ContainerSetAccessPolicyHeaders = { + serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", - className: "BlobStartCopyFromURLHeaders", + className: "ContainerSetAccessPolicyHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const 
BlobStartCopyFromURLExceptionHeaders = { - serializedName: "Blob_startCopyFromURLExceptionHeaders", +const ContainerSetAccessPolicyExceptionHeaders = { + serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", - className: "BlobStartCopyFromURLExceptionHeaders", + className: "ContainerSetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobCopyFromURLHeaders = { - serializedName: "Blob_copyFromURLHeaders", +const ContainerRestoreHeaders = { + serializedName: "Container_restoreHeaders", type: { name: "Composite", - className: "BlobCopyFromURLHeaders", + className: "ContainerRestoreHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyStatus: { - defaultValue: "success", - isConstant: true, - serializedName: "x-ms-copy-status", - type: { - name: "String" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - 
} - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobCopyFromURLExceptionHeaders = { - serializedName: "Blob_copyFromURLExceptionHeaders", +const ContainerRestoreExceptionHeaders = { + serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", - className: "BlobCopyFromURLExceptionHeaders", + className: "ContainerRestoreExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobAbortCopyFromURLHeaders = { - serializedName: "Blob_abortCopyFromURLHeaders", +const ContainerRenameHeaders = { + serializedName: "Container_renameHeaders", type: { name: "Composite", - className: "BlobAbortCopyFromURLHeaders", + className: "ContainerRenameHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobAbortCopyFromURLExceptionHeaders = { - 
serializedName: "Blob_abortCopyFromURLExceptionHeaders", +const ContainerRenameExceptionHeaders = { + serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", - className: "BlobAbortCopyFromURLExceptionHeaders", + className: "ContainerRenameExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetTierHeaders = { - serializedName: "Blob_setTierHeaders", +const ContainerSubmitBatchHeaders = { + serializedName: "Container_submitBatchHeaders", type: { name: "Composite", - className: "BlobSetTierHeaders", + className: "ContainerSubmitBatchHeaders", modelProperties: { - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + contentType: { + serializedName: "content-type", + xmlName: "content-type", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlobSetTierExceptionHeaders = { - serializedName: "Blob_setTierExceptionHeaders", +const ContainerSubmitBatchExceptionHeaders = { + serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", - className: "BlobSetTierExceptionHeaders", + className: "ContainerSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobGetAccountInfoHeaders = { - serializedName: "Blob_getAccountInfoHeaders", +const ContainerFilterBlobsHeaders = { + 
serializedName: "Container_filterBlobsHeaders", type: { name: "Composite", - className: "BlobGetAccountInfoHeaders", + className: "ContainerFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - skuName: { - serializedName: "x-ms-sku-name", - xmlName: "x-ms-sku-name", - type: { - name: "Enum", - allowedValues: [ - "Standard_LRS", - "Standard_GRS", - "Standard_RAGRS", - "Standard_ZRS", - "Premium_LRS" - ] - } + name: "DateTimeRfc1123", + }, }, - accountKind: { - serializedName: "x-ms-account-kind", - xmlName: "x-ms-account-kind", - type: { - name: "Enum", - allowedValues: [ - "Storage", - "BlobStorage", - "StorageV2", - "FileStorage", - "BlockBlobStorage" - ] - } - } - } - } + }, + }, }; -const BlobGetAccountInfoExceptionHeaders = { - serializedName: "Blob_getAccountInfoExceptionHeaders", +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", type: { name: "Composite", - className: "BlobGetAccountInfoExceptionHeaders", + className: "ContainerFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobQueryHeaders = { - serializedName: "Blob_queryHeaders", +const ContainerAcquireLeaseHeaders = { + serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", - className: "BlobQueryHeaders", + className: "ContainerAcquireLeaseHeaders", modelProperties: { + etag: { + 
serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - metadata: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", type: { - name: "Dictionary", - value: { type: { name: "String" } } - } + name: "String", + }, }, - contentLength: { - serializedName: "content-length", - xmlName: "content-length", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Number" - } + name: "String", + }, }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - contentRange: { - serializedName: "content-range", - xmlName: "content-range", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerAcquireLeaseExceptionHeaders = { + serializedName: "Container_acquireLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerAcquireLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, }, + }, + }, +}; +const ContainerReleaseLeaseHeaders = { + serializedName: "Container_releaseLeaseHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseHeaders", + modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + lastModified: { + 
serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "ByteArray" - } + name: "DateTimeRfc1123", + }, }, - contentEncoding: { - serializedName: "content-encoding", - xmlName: "content-encoding", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, - cacheControl: { - serializedName: "cache-control", - xmlName: "cache-control", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - contentDisposition: { - serializedName: "content-disposition", - xmlName: "content-disposition", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - contentLanguage: { - serializedName: "content-language", - xmlName: "content-language", + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, +}; +const ContainerReleaseLeaseExceptionHeaders = { + serializedName: "Container_releaseLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerReleaseLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "String" - } + name: "String", + }, }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + }, + }, +}; +const ContainerRenewLeaseHeaders = { + serializedName: "Container_renewLeaseHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", type: { - name: "Number" - } + name: "String", + }, }, - blobType: { - serializedName: "x-ms-blob-type", - xmlName: "x-ms-blob-type", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Enum", - allowedValues: 
["BlockBlob", "PageBlob", "AppendBlob"] - } + name: "DateTimeRfc1123", + }, }, - copyCompletionTime: { - serializedName: "x-ms-copy-completion-time", - xmlName: "x-ms-copy-completion-time", + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - copyStatusDescription: { - serializedName: "x-ms-copy-status-description", - xmlName: "x-ms-copy-status-description", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - copyProgress: { - serializedName: "x-ms-copy-progress", - xmlName: "x-ms-copy-progress", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - copySource: { - serializedName: "x-ms-copy-source", - xmlName: "x-ms-copy-source", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", + }, + }, +}; +const ContainerRenewLeaseExceptionHeaders = { + serializedName: "Container_renewLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerRenewLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + name: "String", + }, }, - leaseDuration: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", + }, + }, +}; +const ContainerBreakLeaseHeaders = { + serializedName: "Container_breakLeaseHeaders", + type: { + name: "Composite", + className: 
"ContainerBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", type: { - name: "Enum", - allowedValues: ["infinite", "fixed"] - } + name: "String", + }, }, - leaseState: { - serializedName: "x-ms-lease-state", - xmlName: "x-ms-lease-state", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "Enum", - allowedValues: [ - "available", - "leased", - "expired", - "breaking", - "broken" - ] - } + name: "DateTimeRfc1123", + }, }, - leaseStatus: { - serializedName: "x-ms-lease-status", - xmlName: "x-ms-lease-status", + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", type: { - name: "Enum", - allowedValues: ["locked", "unlocked"] - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - acceptRanges: { - serializedName: "accept-ranges", - xmlName: "accept-ranges", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + }, + }, +}; +const ContainerBreakLeaseExceptionHeaders = { + serializedName: "Container_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "ContainerBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Number" - } + name: "String", + }, }, - isServerEncrypted: { - serializedName: "x-ms-server-encrypted", - xmlName: 
"x-ms-server-encrypted", + }, + }, +}; +const ContainerChangeLeaseHeaders = { + serializedName: "Container_changeLeaseHeaders", + type: { + name: "Composite", + className: "ContainerChangeLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", type: { - name: "Boolean" - } + name: "String", + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", type: { - name: "String" - } + name: "String", + }, }, - blobContentMD5: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "ByteArray" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - contentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "ByteArray" - } - } - } - } + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, }; -const BlobQueryExceptionHeaders = { - serializedName: "Blob_queryExceptionHeaders", +const ContainerChangeLeaseExceptionHeaders = { + serializedName: "Container_changeLeaseExceptionHeaders", type: { name: "Composite", - className: "BlobQueryExceptionHeaders", + className: "ContainerChangeLeaseExceptionHeaders", modelProperties: { errorCode: 
{ serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobGetTagsHeaders = { - serializedName: "Blob_getTagsHeaders", +const ContainerListBlobFlatSegmentHeaders = { + serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", - className: "BlobGetTagsHeaders", + className: "ContainerListBlobFlatSegmentHeaders", modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobGetTagsExceptionHeaders = { - serializedName: "Blob_getTagsExceptionHeaders", +const ContainerListBlobFlatSegmentExceptionHeaders = { + serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", - className: "BlobGetTagsExceptionHeaders", + className: "ContainerListBlobFlatSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetTagsHeaders = { - serializedName: "Blob_setTagsHeaders", +const ContainerListBlobHierarchySegmentHeaders = { + serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", - 
className: "BlobSetTagsHeaders", + className: "ContainerListBlobHierarchySegmentHeaders", modelProperties: { + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlobSetTagsExceptionHeaders = { - serializedName: "Blob_setTagsExceptionHeaders", +const ContainerListBlobHierarchySegmentExceptionHeaders = { + serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", - className: "BlobSetTagsExceptionHeaders", + className: "ContainerListBlobHierarchySegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageBlobCreateHeaders = { - serializedName: "PageBlob_createHeaders", +const ContainerGetAccountInfoHeaders = { + serializedName: "Container_getAccountInfoHeaders", type: { name: "Composite", - className: "PageBlobCreateHeaders", + className: "ContainerGetAccountInfoHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: 
"content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } + name: "DateTimeRfc1123", + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", type: { - name: "String" - } + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const PageBlobCreateExceptionHeaders = { - serializedName: "PageBlob_createExceptionHeaders", +const ContainerGetAccountInfoExceptionHeaders = { + serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", - className: 
"PageBlobCreateExceptionHeaders", + className: "ContainerGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageBlobUploadPagesHeaders = { - serializedName: "PageBlob_uploadPagesHeaders", +const BlobDownloadHeaders = { + serializedName: "Blob_downloadHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesHeaders", + className: "BlobDownloadHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, + }, + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", + type: { + name: "String", + }, + }, + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { - name: 
"ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } + name: "ByteArray", + }, }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", type: { - name: "Number" - } + name: "String", + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "DateTimeRfc1123" - } + name: "Number", + }, }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", type: { - name: "Boolean" - } + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - encryptionScope: { - serializedName: 
"x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", type: { - name: "String" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", type: { - name: "String" - } - } - } - } -}; -const PageBlobUploadPagesExceptionHeaders = { - serializedName: "PageBlob_uploadPagesExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobUploadPagesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", type: { - name: "String" - } - } - } - } -}; -const PageBlobClearPagesHeaders = { - serializedName: "PageBlob_clearPagesHeaders", - type: { - name: "Composite", - className: "PageBlobClearPagesHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", type: { - name: "DateTimeRfc1123" - } + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", type: { - name: "ByteArray" - } + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + leaseState: { + serializedName: "x-ms-lease-state", + 
xmlName: "x-ms-lease-state", type: { - name: "ByteArray" - } + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", type: { - name: "Number" - } + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, - date: { - serializedName: "date", - xmlName: "date", + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", type: { - name: "String" - } - } - } - } -}; -const PageBlobClearPagesExceptionHeaders = { - serializedName: "PageBlob_clearPagesExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobClearPagesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: "Boolean", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", type: { - name: "String" - } - } - } - } -}; -const PageBlobUploadPagesFromURLHeaders = { - serializedName: "PageBlob_uploadPagesFromURLHeaders", - type: { - name: "Composite", - className: "PageBlobUploadPagesFromURLHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: 
"etag", + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", type: { - name: "DateTimeRfc1123" - } + name: "Number", + }, }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", type: { - name: "ByteArray" - } + name: "Boolean", + }, }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", type: { - name: "ByteArray" - } + name: "String", + }, }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", type: { - name: "Number" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", type: { - name: "String" - } + name: "ByteArray", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", type: { - name: "String" - } + name: "Number", + }, }, - date: { - serializedName: "date", - xmlName: "date", + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", type: { - name: "DateTimeRfc1123" - } + name: "Boolean", + }, }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + lastAccessed: { + serializedName: 
"x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", type: { - name: "Boolean" - } + name: "DateTimeRfc1123", + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", type: { - name: "String" - } + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", type: { - name: "String" - } + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + }, + }, }; -const PageBlobUploadPagesFromURLExceptionHeaders = { - serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", +const BlobDownloadExceptionHeaders = { + serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", - className: "PageBlobUploadPagesFromURLExceptionHeaders", + className: "BlobDownloadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageBlobGetPageRangesHeaders = { - serializedName: "PageBlob_getPageRangesHeaders", +const BlobGetPropertiesHeaders = { + serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", - className: "PageBlobGetPageRangesHeaders", + className: "BlobGetPropertiesHeaders", 
modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - etag: { - serializedName: "etag", - xmlName: "etag", + createdOn: { + serializedName: "x-ms-creation-time", + xmlName: "x-ms-creation-time", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", type: { - name: "Number" - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + objectReplicationPolicyId: { + serializedName: "x-ms-or-policy-id", + xmlName: "x-ms-or-policy-id", type: { - name: "String" - } + name: "String", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + objectReplicationRules: { + serializedName: "x-ms-or", + headerCollectionPrefix: "x-ms-or-", + xmlName: "x-ms-or", type: { - name: "String" - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", type: { - name: "String" - } + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, }, - date: { - serializedName: "date", - xmlName: "date", + copyCompletedOn: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", type: { - name: "String" - } - } - } - } -}; -const 
PageBlobGetPageRangesExceptionHeaders = { - serializedName: "PageBlob_getPageRangesExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobGetPageRangesExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", type: { - name: "String" - } - } - } - } -}; -const PageBlobGetPageRangesDiffHeaders = { - serializedName: "PageBlob_getPageRangesDiffHeaders", - type: { - name: "Composite", - className: "PageBlobGetPageRangesDiffHeaders", - modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, - etag: { - serializedName: "etag", - xmlName: "etag", + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", type: { - name: "String" - } + name: "String", + }, }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", type: { - name: "Number" - } + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, }, - clientRequestId: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", + isIncrementalCopy: { + serializedName: "x-ms-incremental-copy", + xmlName: "x-ms-incremental-copy", type: { - name: "String" - } + name: "Boolean", + }, }, - requestId: { - serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", + destinationSnapshot: { + serializedName: "x-ms-copy-destination-snapshot", + xmlName: "x-ms-copy-destination-snapshot", type: { - name: "String" - } + name: "String", + }, }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", + 
leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", type: { - name: "String" - } + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, }, - date: { - serializedName: "date", - xmlName: "date", + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", type: { - name: "DateTimeRfc1123" - } + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", type: { - name: "String" - } - } - } - } -}; -const PageBlobGetPageRangesDiffExceptionHeaders = { - serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", - type: { - name: "Composite", - className: "PageBlobGetPageRangesDiffExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", type: { - name: "String" - } - } - } - } -}; -const PageBlobResizeHeaders = { - serializedName: "PageBlob_resizeHeaders", - type: { - name: "Composite", - className: "PageBlobResizeHeaders", - modelProperties: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", type: { - name: "DateTimeRfc1123" - } + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + contentDisposition: { + 
serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { - name: "Number" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + accessTier: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "String", + }, + }, + accessTierInferred: { + serializedName: "x-ms-access-tier-inferred", + xmlName: "x-ms-access-tier-inferred", + 
type: { + name: "Boolean", + }, + }, + archiveStatus: { + serializedName: "x-ms-archive-status", + xmlName: "x-ms-archive-status", + type: { + name: "String", + }, + }, + accessTierChangedOn: { + serializedName: "x-ms-access-tier-change-time", + xmlName: "x-ms-access-tier-change-time", + type: { + name: "DateTimeRfc1123", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + isCurrentVersion: { + serializedName: "x-ms-is-current-version", + xmlName: "x-ms-is-current-version", + type: { + name: "Boolean", + }, + }, + tagCount: { + serializedName: "x-ms-tag-count", + xmlName: "x-ms-tag-count", + type: { + name: "Number", + }, + }, + expiresOn: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + rehydratePriority: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, + lastAccessed: { + serializedName: "x-ms-last-access-time", + xmlName: "x-ms-last-access-time", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyExpiresOn: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", + }, + }, + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, + }, + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const 
PageBlobResizeExceptionHeaders = { - serializedName: "PageBlob_resizeExceptionHeaders", +const BlobGetPropertiesExceptionHeaders = { + serializedName: "Blob_getPropertiesExceptionHeaders", type: { name: "Composite", - className: "PageBlobResizeExceptionHeaders", + className: "BlobGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageBlobUpdateSequenceNumberHeaders = { - serializedName: "PageBlob_updateSequenceNumberHeaders", +const BlobDeleteHeaders = { + serializedName: "Blob_deleteHeaders", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberHeaders", + className: "BlobDeleteHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - blobSequenceNumber: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", - type: { - name: "Number" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageBlobUpdateSequenceNumberExceptionHeaders = { - serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", +const 
BlobDeleteExceptionHeaders = { + serializedName: "Blob_deleteExceptionHeaders", type: { name: "Composite", - className: "PageBlobUpdateSequenceNumberExceptionHeaders", + className: "BlobDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } -}; -const PageBlobCopyIncrementalHeaders = { - serializedName: "PageBlob_copyIncrementalHeaders", - type: { - name: "Composite", - className: "PageBlobCopyIncrementalHeaders", - modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } + name: "String", + }, }, + }, + }, +}; +const BlobUndeleteHeaders = { + serializedName: "Blob_undeleteHeaders", + type: { + name: "Composite", + className: "BlobUndeleteHeaders", + modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - copyId: { - serializedName: "x-ms-copy-id", - xmlName: "x-ms-copy-id", - type: { - name: "String" - } - }, - copyStatus: { - serializedName: "x-ms-copy-status", - xmlName: "x-ms-copy-status", - type: { - name: "Enum", - allowedValues: ["pending", "success", "aborted", "failed"] - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const PageBlobCopyIncrementalExceptionHeaders = { - 
serializedName: "PageBlob_copyIncrementalExceptionHeaders", +const BlobUndeleteExceptionHeaders = { + serializedName: "Blob_undeleteExceptionHeaders", type: { name: "Composite", - className: "PageBlobCopyIncrementalExceptionHeaders", + className: "BlobUndeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const AppendBlobCreateHeaders = { - serializedName: "AppendBlob_createHeaders", +const BlobSetExpiryHeaders = { + serializedName: "Blob_setExpiryHeaders", type: { name: "Composite", - className: "AppendBlobCreateHeaders", + className: "BlobSetExpiryHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - 
name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const AppendBlobCreateExceptionHeaders = { - serializedName: "AppendBlob_createExceptionHeaders", +const BlobSetExpiryExceptionHeaders = { + serializedName: "Blob_setExpiryExceptionHeaders", type: { name: "Composite", - className: "AppendBlobCreateExceptionHeaders", + className: "BlobSetExpiryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const AppendBlobAppendBlockHeaders = { - serializedName: "AppendBlob_appendBlockHeaders", +const BlobSetHttpHeadersHeaders = { + serializedName: "Blob_setHttpHeadersHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockHeaders", + className: "BlobSetHttpHeadersHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } + name: "DateTimeRfc1123", + }, }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", type: { - name: "ByteArray" - } + name: "Number", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: 
"String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", - type: { - name: "String" - } - }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", - type: { - name: "Number" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const AppendBlobAppendBlockExceptionHeaders = { - serializedName: "AppendBlob_appendBlockExceptionHeaders", +const BlobSetHttpHeadersExceptionHeaders = { + serializedName: "Blob_setHttpHeadersExceptionHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockExceptionHeaders", + className: "BlobSetHttpHeadersExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const AppendBlobAppendBlockFromUrlHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlHeaders", +const BlobSetImmutabilityPolicyHeaders = { + serializedName: "Blob_setImmutabilityPolicyHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockFromUrlHeaders", + 
className: "BlobSetImmutabilityPolicyHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "ByteArray" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", - xmlName: "x-ms-request-id", - type: { - name: "String" - } - }, - version: { - serializedName: "x-ms-version", - xmlName: "x-ms-version", - type: { - name: "String" - } - }, - date: { - serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - blobAppendOffset: { - serializedName: "x-ms-blob-append-offset", - xmlName: "x-ms-blob-append-offset", + xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, - blobCommittedBlockCount: { - serializedName: "x-ms-blob-committed-block-count", - xmlName: "x-ms-blob-committed-block-count", + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", type: { - name: "Number" - } + name: "String", + }, }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", + date: { + serializedName: "date", + xmlName: "date", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + immutabilityPolicyExpiry: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - isServerEncrypted: { - serializedName: 
"x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", + immutabilityPolicyMode: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: "x-ms-immutability-policy-mode", type: { - name: "Boolean" - } + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const AppendBlobAppendBlockFromUrlExceptionHeaders = { - serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", +const BlobSetImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", type: { name: "Composite", - className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + className: "BlobSetImmutabilityPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const AppendBlobSealHeaders = { - serializedName: "AppendBlob_sealHeaders", +const BlobDeleteImmutabilityPolicyHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyHeaders", type: { name: "Composite", - className: "AppendBlobSealHeaders", + className: "BlobDeleteImmutabilityPolicyHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", 
type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, - isSealed: { - serializedName: "x-ms-blob-sealed", - xmlName: "x-ms-blob-sealed", - type: { - name: "Boolean" - } - } - } - } + }, + }, }; -const AppendBlobSealExceptionHeaders = { - serializedName: "AppendBlob_sealExceptionHeaders", +const BlobDeleteImmutabilityPolicyExceptionHeaders = { + serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", type: { name: "Composite", - className: "AppendBlobSealExceptionHeaders", + className: "BlobDeleteImmutabilityPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobUploadHeaders = { - serializedName: "BlockBlob_uploadHeaders", +const BlobSetLegalHoldHeaders = { + serializedName: "Blob_setLegalHoldHeaders", type: { name: "Composite", - className: "BlockBlobUploadHeaders", + className: "BlobSetLegalHoldHeaders", modelProperties: { - etag: { - serializedName: "etag", - xmlName: "etag", - type: { - name: "String" - } - }, - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } - }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - 
serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", + legalHold: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", type: { - name: "String" - } + name: "Boolean", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlockBlobUploadExceptionHeaders = { - serializedName: "BlockBlob_uploadExceptionHeaders", +const BlobSetLegalHoldExceptionHeaders = { + serializedName: "Blob_setLegalHoldExceptionHeaders", type: { name: "Composite", - className: "BlockBlobUploadExceptionHeaders", + className: "BlobSetLegalHoldExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobPutBlobFromUrlHeaders = { - serializedName: "BlockBlob_putBlobFromUrlHeaders", +const BlobSetMetadataHeaders = { + serializedName: "Blob_setMetadataHeaders", type: { name: "Composite", - className: "BlockBlobPutBlobFromUrlHeaders", + className: "BlobSetMetadataHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + 
}, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } + name: "DateTimeRfc1123", + }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { - name: "Boolean" - } + name: "Boolean", + }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { - name: "String" - } + name: "String", + }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { - name: "String" - } + name: "String", + }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobPutBlobFromUrlExceptionHeaders = { - serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", +const BlobSetMetadataExceptionHeaders = { + serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", - className: "BlockBlobPutBlobFromUrlExceptionHeaders", + className: "BlobSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobStageBlockHeaders = { - serializedName: "BlockBlob_stageBlockHeaders", +const BlobAcquireLeaseHeaders = { + serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockHeaders", + className: "BlobAcquireLeaseHeaders", modelProperties: { - contentMD5: { - 
serializedName: "content-md5", - xmlName: "content-md5", + etag: { + serializedName: "etag", + xmlName: "etag", type: { - name: "ByteArray" - } + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", - type: { - name: "ByteArray" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlockBlobStageBlockExceptionHeaders = { - serializedName: "BlockBlob_stageBlockExceptionHeaders", +const BlobAcquireLeaseExceptionHeaders = { + serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockExceptionHeaders", + className: "BlobAcquireLeaseExceptionHeaders", modelProperties: 
{ errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobStageBlockFromURLHeaders = { - serializedName: "BlockBlob_stageBlockFromURLHeaders", +const BlobReleaseLeaseHeaders = { + serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockFromURLHeaders", + className: "BlobReleaseLeaseHeaders", modelProperties: { - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", + etag: { + serializedName: "etag", + xmlName: "etag", type: { - name: "ByteArray" - } + name: "String", + }, }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", type: { - name: "ByteArray" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - 
} + }, + }, }; -const BlockBlobStageBlockFromURLExceptionHeaders = { - serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", +const BlobReleaseLeaseExceptionHeaders = { + serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", - className: "BlockBlobStageBlockFromURLExceptionHeaders", + className: "BlobReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobCommitBlockListHeaders = { - serializedName: "BlockBlob_commitBlockListHeaders", +const BlobRenewLeaseHeaders = { + serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", - className: "BlockBlobCommitBlockListHeaders", + className: "BlobRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } + name: "String", + }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { - name: "DateTimeRfc1123" - } - }, - contentMD5: { - serializedName: "content-md5", - xmlName: "content-md5", - type: { - name: "ByteArray" - } + name: "DateTimeRfc1123", + }, }, - xMsContentCrc64: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", type: { - name: "ByteArray" - } + name: "String", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } - }, - versionId: { - serializedName: "x-ms-version-id", - xmlName: "x-ms-version-id", - type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", 
xmlName: "date", type: { - name: "DateTimeRfc1123" - } - }, - isServerEncrypted: { - serializedName: "x-ms-request-server-encrypted", - xmlName: "x-ms-request-server-encrypted", - type: { - name: "Boolean" - } - }, - encryptionKeySha256: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - }, - encryptionScope: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } + name: "DateTimeRfc1123", + }, }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } + }, + }, }; -const BlockBlobCommitBlockListExceptionHeaders = { - serializedName: "BlockBlob_commitBlockListExceptionHeaders", +const BlobRenewLeaseExceptionHeaders = { + serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", - className: "BlockBlobCommitBlockListExceptionHeaders", + className: "BlobRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { - name: "String" - } - } - } - } + name: "String", + }, + }, + }, + }, }; -const BlockBlobGetBlockListHeaders = { - serializedName: "BlockBlob_getBlockListHeaders", +const BlobChangeLeaseHeaders = { + serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", - className: "BlockBlobGetBlockListHeaders", + className: "BlobChangeLeaseHeaders", modelProperties: { - lastModified: { - serializedName: "last-modified", - xmlName: "last-modified", - type: { - name: "DateTimeRfc1123" - } - }, etag: { serializedName: "etag", xmlName: "etag", type: { - name: "String" - } - }, - contentType: { - serializedName: "content-type", - xmlName: "content-type", - type: { - name: "String" - } + name: "String", + }, }, - blobContentLength: { - serializedName: "x-ms-blob-content-length", - xmlName: "x-ms-blob-content-length", + lastModified: { + serializedName: "last-modified", + 
xmlName: "last-modified", type: { - name: "Number" - } + name: "DateTimeRfc1123", + }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { - name: "String" - } + name: "String", + }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { - name: "String" - } + name: "String", + }, + }, + leaseId: { + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", + type: { + name: "String", + }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { - name: "String" - } + name: "String", + }, }, date: { serializedName: "date", - xmlName: "date", - type: { - name: "DateTimeRfc1123" - } - }, - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; -const BlockBlobGetBlockListExceptionHeaders = { - serializedName: "BlockBlob_getBlockListExceptionHeaders", - type: { - name: "Composite", - className: "BlockBlobGetBlockListExceptionHeaders", - modelProperties: { - errorCode: { - serializedName: "x-ms-error-code", - xmlName: "x-ms-error-code", - type: { - name: "String" - } - } - } - } -}; - -var Mappers = /*#__PURE__*/Object.freeze({ - __proto__: null, - BlobServiceProperties: BlobServiceProperties, - Logging: Logging, - RetentionPolicy: RetentionPolicy, - Metrics: Metrics, - CorsRule: CorsRule, - StaticWebsite: StaticWebsite, - StorageError: StorageError, - BlobServiceStatistics: BlobServiceStatistics, - GeoReplication: GeoReplication, - ListContainersSegmentResponse: ListContainersSegmentResponse, - ContainerItem: ContainerItem, - ContainerProperties: ContainerProperties, - KeyInfo: KeyInfo, - UserDelegationKey: UserDelegationKey, - FilterBlobSegment: FilterBlobSegment, - FilterBlobItem: FilterBlobItem, - BlobTags: BlobTags, - BlobTag: BlobTag, - SignedIdentifier: SignedIdentifier, - AccessPolicy: AccessPolicy, - ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, - 
BlobFlatListSegment: BlobFlatListSegment, - BlobItemInternal: BlobItemInternal, - BlobName: BlobName, - BlobPropertiesInternal: BlobPropertiesInternal, - ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, - BlobHierarchyListSegment: BlobHierarchyListSegment, - BlobPrefix: BlobPrefix, - BlockLookupList: BlockLookupList, - BlockList: BlockList, - Block: Block, - PageList: PageList, - PageRange: PageRange, - ClearRange: ClearRange, - QueryRequest: QueryRequest, - QuerySerialization: QuerySerialization, - QueryFormat: QueryFormat, - DelimitedTextConfiguration: DelimitedTextConfiguration, - JsonTextConfiguration: JsonTextConfiguration, - ArrowConfiguration: ArrowConfiguration, - ArrowField: ArrowField, - ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, - ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, - ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, - ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, - ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, - ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, - ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, - ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, - ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, - ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, - ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, - ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, - ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, - ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, - ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, - ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, - ContainerCreateHeaders: ContainerCreateHeaders, - ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, - 
ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, - ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, - ContainerDeleteHeaders: ContainerDeleteHeaders, - ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, - ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, - ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, - ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, - ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, - ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, - ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, - ContainerRestoreHeaders: ContainerRestoreHeaders, - ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, - ContainerRenameHeaders: ContainerRenameHeaders, - ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, - ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, - ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, - ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, - ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, - ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, - ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, - ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, - ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, - ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, - ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, - ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, - ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, - ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, - ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, - ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, - 
ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, - ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, - ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, - ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, - ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, - BlobDownloadHeaders: BlobDownloadHeaders, - BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, - BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, - BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, - BlobDeleteHeaders: BlobDeleteHeaders, - BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, - BlobUndeleteHeaders: BlobUndeleteHeaders, - BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, - BlobSetExpiryHeaders: BlobSetExpiryHeaders, - BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, - BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, - BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, - BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, - BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, - BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, - BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, - BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, - BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, - BlobSetMetadataHeaders: BlobSetMetadataHeaders, - BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, - BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, - BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, - BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, - BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, - BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, - 
BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, - BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, - BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, - BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, - BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, - BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, - BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, - BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, - BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, - BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, - BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, - BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, - BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, - BlobSetTierHeaders: BlobSetTierHeaders, - BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, - BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, - BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, - BlobQueryHeaders: BlobQueryHeaders, - BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, - BlobGetTagsHeaders: BlobGetTagsHeaders, - BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, - BlobSetTagsHeaders: BlobSetTagsHeaders, - BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, - PageBlobCreateHeaders: PageBlobCreateHeaders, - PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, - PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, - PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, - PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, - PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, - PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, - PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, - PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, - PageBlobGetPageRangesExceptionHeaders: 
PageBlobGetPageRangesExceptionHeaders, - PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, - PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, - PageBlobResizeHeaders: PageBlobResizeHeaders, - PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, - PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, - PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, - PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, - PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, - AppendBlobCreateHeaders: AppendBlobCreateHeaders, - AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, - AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, - AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, - AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, - AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, - AppendBlobSealHeaders: AppendBlobSealHeaders, - AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, - BlockBlobUploadHeaders: BlockBlobUploadHeaders, - BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, - BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, - BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, - BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, - BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, - BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, - BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, - BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, - BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, - BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, - BlockBlobGetBlockListExceptionHeaders: 
BlockBlobGetBlockListExceptionHeaders -}); - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -const contentType = { - parameterPath: ["options", "contentType"], - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Content-Type", - type: { - name: "String" - } - } -}; -const blobServiceProperties = { - parameterPath: "blobServiceProperties", - mapper: BlobServiceProperties -}; -const accept = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" - } - } -}; -const url = { - parameterPath: "url", - mapper: { - serializedName: "url", - required: true, - xmlName: "url", - type: { - name: "String" - } + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, }, - skipEncoding: true -}; -const restype = { - parameterPath: "restype", - mapper: { - defaultValue: "service", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } - } -}; -const comp = { - parameterPath: "comp", - mapper: { - defaultValue: "properties", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } }; -const timeoutInSeconds = { - parameterPath: ["options", "timeoutInSeconds"], - mapper: { - constraints: { - InclusiveMinimum: 0 +const BlobChangeLeaseExceptionHeaders = { + serializedName: "Blob_changeLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobChangeLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, }, - serializedName: "timeout", - xmlName: "timeout", - type: { - name: "Number" - } - } -}; -const version = { - parameterPath: "version", - mapper: { - defaultValue: "2023-11-03", - 
isConstant: true, - serializedName: "x-ms-version", - type: { - name: "String" - } - } -}; -const requestId = { - parameterPath: ["options", "requestId"], - mapper: { - serializedName: "x-ms-client-request-id", - xmlName: "x-ms-client-request-id", - type: { - name: "String" - } - } -}; -const accept1 = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - name: "String" - } - } -}; -const comp1 = { - parameterPath: "comp", - mapper: { - defaultValue: "stats", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } + }, }; -const comp2 = { - parameterPath: "comp", - mapper: { - defaultValue: "list", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } +const BlobBreakLeaseHeaders = { + serializedName: "Blob_breakLeaseHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + leaseTime: { + serializedName: "x-ms-lease-time", + xmlName: "x-ms-lease-time", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + }, + }, }; -const prefix = { - parameterPath: ["options", "prefix"], - mapper: { - serializedName: "prefix", - xmlName: "prefix", - type: { - name: "String" - } - } +const BlobBreakLeaseExceptionHeaders = { + 
serializedName: "Blob_breakLeaseExceptionHeaders", + type: { + name: "Composite", + className: "BlobBreakLeaseExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const marker = { - parameterPath: ["options", "marker"], - mapper: { - serializedName: "marker", - xmlName: "marker", - type: { - name: "String" - } - } +const BlobCreateSnapshotHeaders = { + serializedName: "Blob_createSnapshotHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotHeaders", + modelProperties: { + snapshot: { + serializedName: "x-ms-snapshot", + xmlName: "x-ms-snapshot", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const maxPageSize = { - parameterPath: ["options", "maxPageSize"], - mapper: { - constraints: { - InclusiveMinimum: 1 +const BlobCreateSnapshotExceptionHeaders = 
{ + serializedName: "Blob_createSnapshotExceptionHeaders", + type: { + name: "Composite", + className: "BlobCreateSnapshotExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, }, - serializedName: "maxresults", - xmlName: "maxresults", - type: { - name: "Number" - } - } + }, }; -const include = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListContainersIncludeType", - type: { - name: "Sequence", - element: { +const BlobStartCopyFromURLHeaders = { + serializedName: "Blob_startCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", type: { name: "Enum", - allowedValues: ["metadata", "deleted", "system"] - } - } - } + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + errorCode: { + serializedName: 
"x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, }, - collectionFormat: coreHttp.QueryCollectionFormat.Csv -}; -const keyInfo = { - parameterPath: "keyInfo", - mapper: KeyInfo -}; -const comp3 = { - parameterPath: "comp", - mapper: { - defaultValue: "userdelegationkey", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } -}; -const restype1 = { - parameterPath: "restype", - mapper: { - defaultValue: "account", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } - } -}; -const body = { - parameterPath: "body", - mapper: { - serializedName: "body", - required: true, - xmlName: "body", - type: { - name: "Stream" - } - } -}; -const comp4 = { - parameterPath: "comp", - mapper: { - defaultValue: "batch", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } -}; -const contentLength = { - parameterPath: "contentLength", - mapper: { - serializedName: "Content-Length", - required: true, - xmlName: "Content-Length", - type: { - name: "Number" - } - } -}; -const multipartContentType = { - parameterPath: "multipartContentType", - mapper: { - serializedName: "Content-Type", - required: true, - xmlName: "Content-Type", - type: { - name: "String" - } - } -}; -const comp5 = { - parameterPath: "comp", - mapper: { - defaultValue: "blobs", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } -}; -const where = { - parameterPath: ["options", "where"], - mapper: { - serializedName: "where", - xmlName: "where", - type: { - name: "String" - } - } }; -const restype2 = { - parameterPath: "restype", - mapper: { - defaultValue: "container", - isConstant: true, - serializedName: "restype", - type: { - name: "String" - } - } +const BlobStartCopyFromURLExceptionHeaders = { + serializedName: "Blob_startCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobStartCopyFromURLExceptionHeaders", + modelProperties: { + 
errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const metadata = { - parameterPath: ["options", "metadata"], - mapper: { - serializedName: "x-ms-meta", - xmlName: "x-ms-meta", - type: { - name: "Dictionary", - value: { type: { name: "String" } } +const BlobCopyFromURLHeaders = { + serializedName: "Blob_copyFromURLHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + defaultValue: "success", + isConstant: true, + serializedName: "x-ms-copy-status", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: 
"x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, }, - headerCollectionPrefix: "x-ms-meta-" - } -}; -const access = { - parameterPath: ["options", "access"], - mapper: { - serializedName: "x-ms-blob-public-access", - xmlName: "x-ms-blob-public-access", - type: { - name: "Enum", - allowedValues: ["container", "blob"] - } - } + }, }; -const defaultEncryptionScope = { - parameterPath: [ - "options", - "containerEncryptionScope", - "defaultEncryptionScope" - ], - mapper: { - serializedName: "x-ms-default-encryption-scope", - xmlName: "x-ms-default-encryption-scope", - type: { - name: "String" - } - } +const BlobCopyFromURLExceptionHeaders = { + serializedName: "Blob_copyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const preventEncryptionScopeOverride = { - parameterPath: [ - "options", - "containerEncryptionScope", - "preventEncryptionScopeOverride" - ], - mapper: { - serializedName: "x-ms-deny-encryption-scope-override", - xmlName: "x-ms-deny-encryption-scope-override", - type: { - name: "Boolean" - } - } +const BlobAbortCopyFromURLHeaders = { + serializedName: "Blob_abortCopyFromURLHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const leaseId = { - parameterPath: ["options", "leaseAccessConditions", "leaseId"], - mapper: { - serializedName: "x-ms-lease-id", - xmlName: "x-ms-lease-id", - type: { - name: "String" - } - } +const BlobAbortCopyFromURLExceptionHeaders = { + serializedName: "Blob_abortCopyFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlobAbortCopyFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const ifModifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], - mapper: { - serializedName: "If-Modified-Since", - xmlName: "If-Modified-Since", - type: { - name: "DateTimeRfc1123" - } - } +const BlobSetTierHeaders = { + serializedName: "Blob_setTierHeaders", + type: { + name: "Composite", + className: "BlobSetTierHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const ifUnmodifiedSince = { - parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], - mapper: { - serializedName: "If-Unmodified-Since", - xmlName: "If-Unmodified-Since", - type: { - name: "DateTimeRfc1123" - } - } +const BlobSetTierExceptionHeaders = { + serializedName: "Blob_setTierExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTierExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const comp6 = { - parameterPath: "comp", - mapper: { - defaultValue: "metadata", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } +const BlobGetAccountInfoHeaders = { + serializedName: "Blob_getAccountInfoHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + skuName: { + serializedName: "x-ms-sku-name", + xmlName: "x-ms-sku-name", + type: { + name: "Enum", + allowedValues: [ + "Standard_LRS", + "Standard_GRS", + "Standard_RAGRS", + "Standard_ZRS", + "Premium_LRS", + ], + }, + }, + accountKind: { + serializedName: "x-ms-account-kind", + xmlName: "x-ms-account-kind", + type: { + name: "Enum", + allowedValues: [ + "Storage", + "BlobStorage", + "StorageV2", + "FileStorage", + "BlockBlobStorage", + ], + }, + }, + }, + }, }; -const comp7 = { - parameterPath: "comp", - mapper: { - defaultValue: "acl", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } +const BlobGetAccountInfoExceptionHeaders = { + serializedName: "Blob_getAccountInfoExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetAccountInfoExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const containerAcl = { - parameterPath: ["options", "containerAcl"], - mapper: { - serializedName: "containerAcl", - xmlName: 
"SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier", - type: { - name: "Sequence", - element: { +const BlobQueryHeaders = { + serializedName: "Blob_queryHeaders", + type: { + name: "Composite", + className: "BlobQueryHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + metadata: { + serializedName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", + xmlName: "x-ms-meta", + type: { + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, + contentLength: { + serializedName: "content-length", + xmlName: "content-length", + type: { + name: "Number", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + contentRange: { + serializedName: "content-range", + xmlName: "content-range", + type: { + name: "String", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + contentEncoding: { + serializedName: "content-encoding", + xmlName: "content-encoding", + type: { + name: "String", + }, + }, + cacheControl: { + serializedName: "cache-control", + xmlName: "cache-control", + type: { + name: "String", + }, + }, + contentDisposition: { + serializedName: "content-disposition", + xmlName: "content-disposition", + type: { + name: "String", + }, + }, + contentLanguage: { + serializedName: "content-language", + xmlName: "content-language", + type: { + name: "String", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + blobType: { + serializedName: "x-ms-blob-type", + xmlName: "x-ms-blob-type", + type: { + name: "Enum", + allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], + }, + }, + 
copyCompletionTime: { + serializedName: "x-ms-copy-completion-time", + xmlName: "x-ms-copy-completion-time", + type: { + name: "DateTimeRfc1123", + }, + }, + copyStatusDescription: { + serializedName: "x-ms-copy-status-description", + xmlName: "x-ms-copy-status-description", + type: { + name: "String", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyProgress: { + serializedName: "x-ms-copy-progress", + xmlName: "x-ms-copy-progress", + type: { + name: "String", + }, + }, + copySource: { + serializedName: "x-ms-copy-source", + xmlName: "x-ms-copy-source", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + leaseDuration: { + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", + type: { + name: "Enum", + allowedValues: ["infinite", "fixed"], + }, + }, + leaseState: { + serializedName: "x-ms-lease-state", + xmlName: "x-ms-lease-state", + type: { + name: "Enum", + allowedValues: [ + "available", + "leased", + "expired", + "breaking", + "broken", + ], + }, + }, + leaseStatus: { + serializedName: "x-ms-lease-status", + xmlName: "x-ms-lease-status", + type: { + name: "Enum", + allowedValues: ["locked", "unlocked"], + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + acceptRanges: { + serializedName: "accept-ranges", + xmlName: "accept-ranges", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, 
+ blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-server-encrypted", + xmlName: "x-ms-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + blobContentMD5: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + contentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", type: { - name: "Composite", - className: "SignedIdentifier" - } - } - } - } -}; -const comp8 = { - parameterPath: "comp", - mapper: { - defaultValue: "undelete", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } -}; -const deletedContainerName = { - parameterPath: ["options", "deletedContainerName"], - mapper: { - serializedName: "x-ms-deleted-container-name", - xmlName: "x-ms-deleted-container-name", - type: { - name: "String" - } - } + name: "ByteArray", + }, + }, + }, + }, }; -const deletedContainerVersion = { - parameterPath: ["options", "deletedContainerVersion"], - mapper: { - serializedName: "x-ms-deleted-container-version", - xmlName: "x-ms-deleted-container-version", - type: { - name: "String" - } - } +const BlobQueryExceptionHeaders = { + serializedName: "Blob_queryExceptionHeaders", + type: { + name: "Composite", + className: "BlobQueryExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, 
+ }, + }, }; -const comp9 = { - parameterPath: "comp", - mapper: { - defaultValue: "rename", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } +const BlobGetTagsHeaders = { + serializedName: "Blob_getTagsHeaders", + type: { + name: "Composite", + className: "BlobGetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const sourceContainerName = { - parameterPath: "sourceContainerName", - mapper: { - serializedName: "x-ms-source-container-name", - required: true, - xmlName: "x-ms-source-container-name", - type: { - name: "String" - } - } +const BlobGetTagsExceptionHeaders = { + serializedName: "Blob_getTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobGetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const sourceLeaseId = { - parameterPath: ["options", "sourceLeaseId"], - mapper: { - serializedName: "x-ms-source-lease-id", - xmlName: "x-ms-source-lease-id", - type: { - name: "String" - } - } +const BlobSetTagsHeaders = { + serializedName: "Blob_setTagsHeaders", + type: { + name: "Composite", + className: "BlobSetTagsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + 
serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const comp10 = { - parameterPath: "comp", - mapper: { - defaultValue: "lease", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } +const BlobSetTagsExceptionHeaders = { + serializedName: "Blob_setTagsExceptionHeaders", + type: { + name: "Composite", + className: "BlobSetTagsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const action = { - parameterPath: "action", - mapper: { - defaultValue: "acquire", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" - } - } +const PageBlobCreateHeaders = { + serializedName: "PageBlob_createHeaders", + type: { + name: "Composite", + className: "PageBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + 
serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const duration = { - parameterPath: ["options", "duration"], - mapper: { - serializedName: "x-ms-lease-duration", - xmlName: "x-ms-lease-duration", - type: { - name: "Number" - } - } +const PageBlobCreateExceptionHeaders = { + serializedName: "PageBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const proposedLeaseId = { - parameterPath: ["options", "proposedLeaseId"], - mapper: { - serializedName: "x-ms-proposed-lease-id", - xmlName: "x-ms-proposed-lease-id", - type: { - name: "String" - } - } +const PageBlobUploadPagesHeaders = { + serializedName: "PageBlob_uploadPagesHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: 
"ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const action1 = { - parameterPath: "action", - mapper: { - defaultValue: "release", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" - } - } +const PageBlobUploadPagesExceptionHeaders = { + serializedName: "PageBlob_uploadPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const leaseId1 = { - parameterPath: "leaseId", - mapper: { - serializedName: "x-ms-lease-id", - required: true, - 
xmlName: "x-ms-lease-id", - type: { - name: "String" - } - } +const PageBlobClearPagesHeaders = { + serializedName: "PageBlob_clearPagesHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const action2 = { - parameterPath: "action", - mapper: { - defaultValue: "renew", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" - } - } +const PageBlobClearPagesExceptionHeaders = { + serializedName: "PageBlob_clearPagesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobClearPagesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const action3 
= { - parameterPath: "action", - mapper: { - defaultValue: "break", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" - } - } +const PageBlobUploadPagesFromURLHeaders = { + serializedName: "PageBlob_uploadPagesFromURLHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const breakPeriod = { - parameterPath: ["options", "breakPeriod"], - mapper: { - serializedName: 
"x-ms-lease-break-period", - xmlName: "x-ms-lease-break-period", - type: { - name: "Number" - } - } +const PageBlobUploadPagesFromURLExceptionHeaders = { + serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUploadPagesFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const action4 = { - parameterPath: "action", - mapper: { - defaultValue: "change", - isConstant: true, - serializedName: "x-ms-lease-action", - type: { - name: "String" - } - } +const PageBlobGetPageRangesHeaders = { + serializedName: "PageBlob_getPageRangesHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const proposedLeaseId1 = { - parameterPath: "proposedLeaseId", - mapper: { - serializedName: "x-ms-proposed-lease-id", - required: true, - xmlName: "x-ms-proposed-lease-id", - type: { - name: 
"String" - } - } +const PageBlobGetPageRangesExceptionHeaders = { + serializedName: "PageBlob_getPageRangesExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, +}; +const PageBlobGetPageRangesDiffHeaders = { + serializedName: "PageBlob_getPageRangesDiffHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const include1 = { - parameterPath: ["options", "include"], - mapper: { - serializedName: "include", - xmlName: "include", - xmlElementName: "ListBlobsIncludeItem", - type: { - name: "Sequence", - element: { +const PageBlobGetPageRangesDiffExceptionHeaders = { + serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobGetPageRangesDiffExceptionHeaders", + modelProperties: { + errorCode: { + 
serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", type: { - name: "Enum", - allowedValues: [ - "copy", - "deleted", - "metadata", - "snapshots", - "uncommittedblobs", - "versions", - "tags", - "immutabilitypolicy", - "legalhold", - "deletedwithversions" - ] - } - } - } + name: "String", + }, + }, + }, }, - collectionFormat: coreHttp.QueryCollectionFormat.Csv }; -const delimiter = { - parameterPath: "delimiter", - mapper: { - serializedName: "delimiter", - required: true, - xmlName: "delimiter", - type: { - name: "String" - } - } +const PageBlobResizeHeaders = { + serializedName: "PageBlob_resizeHeaders", + type: { + name: "Composite", + className: "PageBlobResizeHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const snapshot = { - parameterPath: ["options", "snapshot"], - mapper: { - serializedName: "snapshot", - xmlName: "snapshot", - type: { - name: "String" - } - } +const PageBlobResizeExceptionHeaders = { + serializedName: "PageBlob_resizeExceptionHeaders", + type: { + name: "Composite", + className: 
"PageBlobResizeExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const versionId = { - parameterPath: ["options", "versionId"], - mapper: { - serializedName: "versionid", - xmlName: "versionid", - type: { - name: "String" - } - } +const PageBlobUpdateSequenceNumberHeaders = { + serializedName: "PageBlob_updateSequenceNumberHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + blobSequenceNumber: { + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const range = { - parameterPath: ["options", "range"], - mapper: { - serializedName: "x-ms-range", - xmlName: "x-ms-range", - type: { - name: "String" - } - } +const PageBlobUpdateSequenceNumberExceptionHeaders = { + serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", + type: { + name: "Composite", + className: "PageBlobUpdateSequenceNumberExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: 
"x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const rangeGetContentMD5 = { - parameterPath: ["options", "rangeGetContentMD5"], - mapper: { - serializedName: "x-ms-range-get-content-md5", - xmlName: "x-ms-range-get-content-md5", - type: { - name: "Boolean" - } - } +const PageBlobCopyIncrementalHeaders = { + serializedName: "PageBlob_copyIncrementalHeaders", + type: { + name: "Composite", + className: "PageBlobCopyIncrementalHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + copyId: { + serializedName: "x-ms-copy-id", + xmlName: "x-ms-copy-id", + type: { + name: "String", + }, + }, + copyStatus: { + serializedName: "x-ms-copy-status", + xmlName: "x-ms-copy-status", + type: { + name: "Enum", + allowedValues: ["pending", "success", "aborted", "failed"], + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const rangeGetContentCRC64 = { - parameterPath: ["options", "rangeGetContentCRC64"], - mapper: { - serializedName: "x-ms-range-get-content-crc64", - xmlName: "x-ms-range-get-content-crc64", - type: { - name: "Boolean" - } - } +const PageBlobCopyIncrementalExceptionHeaders = { + serializedName: "PageBlob_copyIncrementalExceptionHeaders", + type: { + name: "Composite", + className: 
"PageBlobCopyIncrementalExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const encryptionKey = { - parameterPath: ["options", "cpkInfo", "encryptionKey"], - mapper: { - serializedName: "x-ms-encryption-key", - xmlName: "x-ms-encryption-key", - type: { - name: "String" - } - } +const AppendBlobCreateHeaders = { + serializedName: "AppendBlob_createHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: 
"x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const encryptionKeySha256 = { - parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], - mapper: { - serializedName: "x-ms-encryption-key-sha256", - xmlName: "x-ms-encryption-key-sha256", - type: { - name: "String" - } - } +const AppendBlobCreateExceptionHeaders = { + serializedName: "AppendBlob_createExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobCreateExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const encryptionAlgorithm = { - parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], - mapper: { - serializedName: "x-ms-encryption-algorithm", - xmlName: "x-ms-encryption-algorithm", - type: { - name: "String" - } - } +const AppendBlobAppendBlockHeaders = { + serializedName: "AppendBlob_appendBlockHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: 
"DateTimeRfc1123", + }, + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const ifMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], - mapper: { - serializedName: "If-Match", - xmlName: "If-Match", - type: { - name: "String" - } - } +const AppendBlobAppendBlockExceptionHeaders = { + serializedName: "AppendBlob_appendBlockExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const ifNoneMatch = { - parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], - mapper: { - serializedName: "If-None-Match", - xmlName: "If-None-Match", - type: { - name: "String" - } - } +const AppendBlobAppendBlockFromUrlHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: 
"last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + blobAppendOffset: { + serializedName: "x-ms-blob-append-offset", + xmlName: "x-ms-blob-append-offset", + type: { + name: "String", + }, + }, + blobCommittedBlockCount: { + serializedName: "x-ms-blob-committed-block-count", + xmlName: "x-ms-blob-committed-block-count", + type: { + name: "Number", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const ifTags = { - parameterPath: ["options", "modifiedAccessConditions", "ifTags"], - mapper: { - serializedName: "x-ms-if-tags", - xmlName: "x-ms-if-tags", - type: { - name: "String" - } - } +const AppendBlobAppendBlockFromUrlExceptionHeaders = { + serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobAppendBlockFromUrlExceptionHeaders", + modelProperties: { + 
errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const deleteSnapshots = { - parameterPath: ["options", "deleteSnapshots"], - mapper: { - serializedName: "x-ms-delete-snapshots", - xmlName: "x-ms-delete-snapshots", - type: { - name: "Enum", - allowedValues: ["include", "only"] - } - } +const AppendBlobSealHeaders = { + serializedName: "AppendBlob_sealHeaders", + type: { + name: "Composite", + className: "AppendBlobSealHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isSealed: { + serializedName: "x-ms-blob-sealed", + xmlName: "x-ms-blob-sealed", + type: { + name: "Boolean", + }, + }, + }, + }, }; -const blobDeleteType = { - parameterPath: ["options", "blobDeleteType"], - mapper: { - serializedName: "deletetype", - xmlName: "deletetype", - type: { - name: "String" - } - } +const AppendBlobSealExceptionHeaders = { + serializedName: "AppendBlob_sealExceptionHeaders", + type: { + name: "Composite", + className: "AppendBlobSealExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const comp11 = { - parameterPath: "comp", - mapper: { - defaultValue: "expiry", - isConstant: true, - serializedName: "comp", - 
type: { - name: "String" - } - } +const BlockBlobUploadHeaders = { + serializedName: "BlockBlob_uploadHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const expiryOptions = { - parameterPath: "expiryOptions", - mapper: { - serializedName: "x-ms-expiry-option", - required: true, - xmlName: "x-ms-expiry-option", - type: { - name: "String" - } - } +const BlockBlobUploadExceptionHeaders = { + serializedName: 
"BlockBlob_uploadExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobUploadExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const expiresOn = { - parameterPath: ["options", "expiresOn"], - mapper: { - serializedName: "x-ms-expiry-time", - xmlName: "x-ms-expiry-time", - type: { - name: "String" - } - } +const BlockBlobPutBlobFromUrlHeaders = { + serializedName: "BlockBlob_putBlobFromUrlHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + 
name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const blobCacheControl = { - parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], - mapper: { - serializedName: "x-ms-blob-cache-control", - xmlName: "x-ms-blob-cache-control", - type: { - name: "String" - } - } +const BlockBlobPutBlobFromUrlExceptionHeaders = { + serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobPutBlobFromUrlExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const blobContentType = { - parameterPath: ["options", "blobHttpHeaders", "blobContentType"], - mapper: { - serializedName: "x-ms-blob-content-type", - xmlName: "x-ms-blob-content-type", - type: { - name: "String" - } - } +const BlockBlobStageBlockHeaders = { + serializedName: "BlockBlob_stageBlockHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + 
name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const blobContentMD5 = { - parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], - mapper: { - serializedName: "x-ms-blob-content-md5", - xmlName: "x-ms-blob-content-md5", - type: { - name: "ByteArray" - } - } +const BlockBlobStageBlockExceptionHeaders = { + serializedName: "BlockBlob_stageBlockExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const blobContentEncoding = { - parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], - mapper: { - serializedName: "x-ms-blob-content-encoding", - xmlName: "x-ms-blob-content-encoding", - type: { - name: "String" - } - } +const BlockBlobStageBlockFromURLHeaders = { + serializedName: "BlockBlob_stageBlockFromURLHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLHeaders", + modelProperties: { + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: 
"x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const blobContentLanguage = { - parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], - mapper: { - serializedName: "x-ms-blob-content-language", - xmlName: "x-ms-blob-content-language", - type: { - name: "String" - } - } +const BlockBlobStageBlockFromURLExceptionHeaders = { + serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobStageBlockFromURLExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const blobContentDisposition = { - parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], - mapper: { - serializedName: "x-ms-blob-content-disposition", - xmlName: "x-ms-blob-content-disposition", - type: { - name: "String" - } - } +const BlockBlobCommitBlockListHeaders = { + serializedName: "BlockBlob_commitBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListHeaders", + modelProperties: { + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + 
name: "DateTimeRfc1123", + }, + }, + contentMD5: { + serializedName: "content-md5", + xmlName: "content-md5", + type: { + name: "ByteArray", + }, + }, + xMsContentCrc64: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + versionId: { + serializedName: "x-ms-version-id", + xmlName: "x-ms-version-id", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + isServerEncrypted: { + serializedName: "x-ms-request-server-encrypted", + xmlName: "x-ms-request-server-encrypted", + type: { + name: "Boolean", + }, + }, + encryptionKeySha256: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const comp12 = { - parameterPath: "comp", - mapper: { - defaultValue: "immutabilityPolicies", - isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } +const BlockBlobCommitBlockListExceptionHeaders = { + serializedName: "BlockBlob_commitBlockListExceptionHeaders", + type: { + name: "Composite", + className: "BlockBlobCommitBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const 
immutabilityPolicyExpiry = { - parameterPath: ["options", "immutabilityPolicyExpiry"], - mapper: { - serializedName: "x-ms-immutability-policy-until-date", - xmlName: "x-ms-immutability-policy-until-date", - type: { - name: "DateTimeRfc1123" - } - } +const BlockBlobGetBlockListHeaders = { + serializedName: "BlockBlob_getBlockListHeaders", + type: { + name: "Composite", + className: "BlockBlobGetBlockListHeaders", + modelProperties: { + lastModified: { + serializedName: "last-modified", + xmlName: "last-modified", + type: { + name: "DateTimeRfc1123", + }, + }, + etag: { + serializedName: "etag", + xmlName: "etag", + type: { + name: "String", + }, + }, + contentType: { + serializedName: "content-type", + xmlName: "content-type", + type: { + name: "String", + }, + }, + blobContentLength: { + serializedName: "x-ms-blob-content-length", + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", + }, + }, + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String", + }, + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String", + }, + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String", + }, + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123", + }, + }, + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const immutabilityPolicyMode = { - parameterPath: ["options", "immutabilityPolicyMode"], - mapper: { - serializedName: "x-ms-immutability-policy-mode", - xmlName: "x-ms-immutability-policy-mode", - type: { - name: "Enum", - allowedValues: ["Mutable", "Unlocked", "Locked"] - } - } +const BlockBlobGetBlockListExceptionHeaders = { + serializedName: "BlockBlob_getBlockListExceptionHeaders", + type: { + name: "Composite", + className: 
"BlockBlobGetBlockListExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String", + }, + }, + }, + }, }; -const comp13 = { - parameterPath: "comp", + +var Mappers = /*#__PURE__*/Object.freeze({ + __proto__: null, + AccessPolicy: AccessPolicy, + AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, + AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, + AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, + AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, + AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, + AppendBlobCreateHeaders: AppendBlobCreateHeaders, + AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, + AppendBlobSealHeaders: AppendBlobSealHeaders, + ArrowConfiguration: ArrowConfiguration, + ArrowField: ArrowField, + BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, + BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, + BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, + BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, + BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, + BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, + BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, + BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, + BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, + BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, + BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, + BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, + BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, + BlobDeleteHeaders: BlobDeleteHeaders, + BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, + BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, + BlobDownloadExceptionHeaders: 
BlobDownloadExceptionHeaders, + BlobDownloadHeaders: BlobDownloadHeaders, + BlobFlatListSegment: BlobFlatListSegment, + BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, + BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, + BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, + BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, + BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, + BlobGetTagsHeaders: BlobGetTagsHeaders, + BlobHierarchyListSegment: BlobHierarchyListSegment, + BlobItemInternal: BlobItemInternal, + BlobName: BlobName, + BlobPrefix: BlobPrefix, + BlobPropertiesInternal: BlobPropertiesInternal, + BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, + BlobQueryHeaders: BlobQueryHeaders, + BlobReleaseLeaseExceptionHeaders: BlobReleaseLeaseExceptionHeaders, + BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, + BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, + BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, + BlobServiceProperties: BlobServiceProperties, + BlobServiceStatistics: BlobServiceStatistics, + BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, + BlobSetExpiryHeaders: BlobSetExpiryHeaders, + BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, + BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, + BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, + BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, + BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, + BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, + BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, + BlobSetMetadataHeaders: BlobSetMetadataHeaders, + BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, + BlobSetTagsHeaders: BlobSetTagsHeaders, + BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, + BlobSetTierHeaders: BlobSetTierHeaders, + BlobStartCopyFromURLExceptionHeaders: 
BlobStartCopyFromURLExceptionHeaders, + BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, + BlobTag: BlobTag, + BlobTags: BlobTags, + BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, + BlobUndeleteHeaders: BlobUndeleteHeaders, + Block: Block, + BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, + BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, + BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders, + BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, + BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, + BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, + BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, + BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, + BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, + BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, + BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, + BlockBlobUploadHeaders: BlockBlobUploadHeaders, + BlockList: BlockList, + BlockLookupList: BlockLookupList, + ClearRange: ClearRange, + ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, + ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, + ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, + ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, + ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, + ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, + ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, + ContainerCreateHeaders: ContainerCreateHeaders, + ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, + ContainerDeleteHeaders: ContainerDeleteHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + 
ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, + ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, + ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, + ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, + ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, + ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, + ContainerItem: ContainerItem, + ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, + ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, + ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, + ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, + ContainerProperties: ContainerProperties, + ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, + ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, + ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, + ContainerRenameHeaders: ContainerRenameHeaders, + ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, + ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, + ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, + ContainerRestoreHeaders: ContainerRestoreHeaders, + ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, + ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, + ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, + ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, + ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, + CorsRule: CorsRule, + DelimitedTextConfiguration: DelimitedTextConfiguration, + FilterBlobItem: FilterBlobItem, + FilterBlobSegment: FilterBlobSegment, + 
GeoReplication: GeoReplication, + JsonTextConfiguration: JsonTextConfiguration, + KeyInfo: KeyInfo, + ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, + ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, + ListContainersSegmentResponse: ListContainersSegmentResponse, + Logging: Logging, + Metrics: Metrics, + PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, + PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, + PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, + PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, + PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, + PageBlobCreateHeaders: PageBlobCreateHeaders, + PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, + PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, + PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, + PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, + PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, + PageBlobResizeHeaders: PageBlobResizeHeaders, + PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, + PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, + PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, + PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, + PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, + PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, + PageList: PageList, + PageRange: PageRange, + QueryFormat: QueryFormat, + QueryRequest: QueryRequest, + QuerySerialization: QuerySerialization, + RetentionPolicy: RetentionPolicy, + ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, + ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, + ServiceGetAccountInfoExceptionHeaders: 
ServiceGetAccountInfoExceptionHeaders, + ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, + ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, + ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, + ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, + ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, + ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, + ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, + ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, + ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, + ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, + ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, + ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, + ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, + SignedIdentifier: SignedIdentifier, + StaticWebsite: StaticWebsite, + StorageError: StorageError, + UserDelegationKey: UserDelegationKey +}); + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
+ */ +const contentType = { + parameterPath: ["options", "contentType"], mapper: { - defaultValue: "legalhold", + defaultValue: "application/xml", isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } -}; -const legalHold = { - parameterPath: "legalHold", - mapper: { - serializedName: "x-ms-legal-hold", - required: true, - xmlName: "x-ms-legal-hold", + serializedName: "Content-Type", type: { - name: "Boolean" - } - } + name: "String", + }, + }, }; -const encryptionScope = { - parameterPath: ["options", "encryptionScope"], - mapper: { - serializedName: "x-ms-encryption-scope", - xmlName: "x-ms-encryption-scope", - type: { - name: "String" - } - } +const blobServiceProperties = { + parameterPath: "blobServiceProperties", + mapper: BlobServiceProperties, }; -const comp14 = { - parameterPath: "comp", +const accept = { + parameterPath: "accept", mapper: { - defaultValue: "snapshot", + defaultValue: "application/xml", isConstant: true, - serializedName: "comp", - type: { - name: "String" - } - } -}; -const tier = { - parameterPath: ["options", "tier"], - mapper: { - serializedName: "x-ms-access-tier", - xmlName: "x-ms-access-tier", - type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } - } -}; -const rehydratePriority = { - parameterPath: ["options", "rehydratePriority"], - mapper: { - serializedName: "x-ms-rehydrate-priority", - xmlName: "x-ms-rehydrate-priority", - type: { - name: "Enum", - allowedValues: ["High", "Standard"] - } - } -}; -const sourceIfModifiedSince = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfModifiedSince" - ], - mapper: { - serializedName: "x-ms-source-if-modified-since", - xmlName: "x-ms-source-if-modified-since", - type: { - name: "DateTimeRfc1123" - } - } -}; -const sourceIfUnmodifiedSince = { - parameterPath: [ - "options", - 
"sourceModifiedAccessConditions", - "sourceIfUnmodifiedSince" - ], - mapper: { - serializedName: "x-ms-source-if-unmodified-since", - xmlName: "x-ms-source-if-unmodified-since", - type: { - name: "DateTimeRfc1123" - } - } -}; -const sourceIfMatch = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], - mapper: { - serializedName: "x-ms-source-if-match", - xmlName: "x-ms-source-if-match", - type: { - name: "String" - } - } -}; -const sourceIfNoneMatch = { - parameterPath: [ - "options", - "sourceModifiedAccessConditions", - "sourceIfNoneMatch" - ], - mapper: { - serializedName: "x-ms-source-if-none-match", - xmlName: "x-ms-source-if-none-match", - type: { - name: "String" - } - } -}; -const sourceIfTags = { - parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], - mapper: { - serializedName: "x-ms-source-if-tags", - xmlName: "x-ms-source-if-tags", + serializedName: "Accept", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const copySource = { - parameterPath: "copySource", +const url = { + parameterPath: "url", mapper: { - serializedName: "x-ms-copy-source", + serializedName: "url", required: true, - xmlName: "x-ms-copy-source", - type: { - name: "String" - } - } -}; -const blobTagsString = { - parameterPath: ["options", "blobTagsString"], - mapper: { - serializedName: "x-ms-tags", - xmlName: "x-ms-tags", - type: { - name: "String" - } - } -}; -const sealBlob = { - parameterPath: ["options", "sealBlob"], - mapper: { - serializedName: "x-ms-seal-blob", - xmlName: "x-ms-seal-blob", - type: { - name: "Boolean" - } - } -}; -const legalHold1 = { - parameterPath: ["options", "legalHold"], - mapper: { - serializedName: "x-ms-legal-hold", - xmlName: "x-ms-legal-hold", + xmlName: "url", type: { - name: "Boolean" - } - } + name: "String", + }, + }, + skipEncoding: true, }; -const xMsRequiresSync = { - parameterPath: "xMsRequiresSync", +const restype = { + parameterPath: "restype", mapper: { - defaultValue: 
"true", + defaultValue: "service", isConstant: true, - serializedName: "x-ms-requires-sync", - type: { - name: "String" - } - } -}; -const sourceContentMD5 = { - parameterPath: ["options", "sourceContentMD5"], - mapper: { - serializedName: "x-ms-source-content-md5", - xmlName: "x-ms-source-content-md5", - type: { - name: "ByteArray" - } - } -}; -const copySourceAuthorization = { - parameterPath: ["options", "copySourceAuthorization"], - mapper: { - serializedName: "x-ms-copy-source-authorization", - xmlName: "x-ms-copy-source-authorization", - type: { - name: "String" - } - } -}; -const copySourceTags = { - parameterPath: ["options", "copySourceTags"], - mapper: { - serializedName: "x-ms-copy-source-tag-option", - xmlName: "x-ms-copy-source-tag-option", + serializedName: "restype", type: { - name: "Enum", - allowedValues: ["REPLACE", "COPY"] - } - } + name: "String", + }, + }, }; -const comp15 = { +const comp = { parameterPath: "comp", mapper: { - defaultValue: "copy", + defaultValue: "properties", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } -}; -const copyActionAbortConstant = { - parameterPath: "copyActionAbortConstant", - mapper: { - defaultValue: "abort", - isConstant: true, - serializedName: "x-ms-copy-action", - type: { - name: "String" - } - } + name: "String", + }, + }, }; -const copyId = { - parameterPath: "copyId", +const timeoutInSeconds = { + parameterPath: ["options", "timeoutInSeconds"], mapper: { - serializedName: "copyid", - required: true, - xmlName: "copyid", + constraints: { + InclusiveMinimum: 0, + }, + serializedName: "timeout", + xmlName: "timeout", type: { - name: "String" - } - } + name: "Number", + }, + }, }; -const comp16 = { - parameterPath: "comp", +const version = { + parameterPath: "version", mapper: { - defaultValue: "tier", + defaultValue: "2024-05-04", isConstant: true, - serializedName: "comp", + serializedName: "x-ms-version", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const 
tier1 = { - parameterPath: "tier", +const requestId = { + parameterPath: ["options", "requestId"], mapper: { - serializedName: "x-ms-access-tier", - required: true, - xmlName: "x-ms-access-tier", + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", type: { - name: "Enum", - allowedValues: [ - "P4", - "P6", - "P10", - "P15", - "P20", - "P30", - "P40", - "P50", - "P60", - "P70", - "P80", - "Hot", - "Cool", - "Archive", - "Cold" - ] - } - } + name: "String", + }, + }, }; -const queryRequest = { - parameterPath: ["options", "queryRequest"], - mapper: QueryRequest +const accept1 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", + }, + }, }; -const comp17 = { +const comp1 = { parameterPath: "comp", mapper: { - defaultValue: "query", + defaultValue: "stats", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const comp18 = { +const comp2 = { parameterPath: "comp", mapper: { - defaultValue: "tags", + defaultValue: "list", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } -}; -const tags = { - parameterPath: ["options", "tags"], - mapper: BlobTags + name: "String", + }, + }, }; -const transactionalContentMD5 = { - parameterPath: ["options", "transactionalContentMD5"], +const prefix = { + parameterPath: ["options", "prefix"], mapper: { - serializedName: "Content-MD5", - xmlName: "Content-MD5", + serializedName: "prefix", + xmlName: "prefix", type: { - name: "ByteArray" - } - } + name: "String", + }, + }, }; -const transactionalContentCrc64 = { - parameterPath: ["options", "transactionalContentCrc64"], +const marker = { + parameterPath: ["options", "marker"], mapper: { - serializedName: "x-ms-content-crc64", - xmlName: "x-ms-content-crc64", + serializedName: "marker", + xmlName: "marker", type: { - name: "ByteArray" - } - } + name: "String", + }, + }, }; -const 
blobType = { - parameterPath: "blobType", +const maxPageSize = { + parameterPath: ["options", "maxPageSize"], mapper: { - defaultValue: "PageBlob", - isConstant: true, - serializedName: "x-ms-blob-type", + constraints: { + InclusiveMinimum: 1, + }, + serializedName: "maxresults", + xmlName: "maxresults", type: { - name: "String" - } - } + name: "Number", + }, + }, }; -const blobContentLength = { - parameterPath: "blobContentLength", +const include = { + parameterPath: ["options", "include"], mapper: { - serializedName: "x-ms-blob-content-length", - required: true, - xmlName: "x-ms-blob-content-length", + serializedName: "include", + xmlName: "include", + xmlElementName: "ListContainersIncludeType", type: { - name: "Number" - } - } + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: ["metadata", "deleted", "system"], + }, + }, + }, + }, + collectionFormat: "CSV", }; -const blobSequenceNumber = { - parameterPath: ["options", "blobSequenceNumber"], +const keyInfo = { + parameterPath: "keyInfo", + mapper: KeyInfo, +}; +const comp3 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-blob-sequence-number", - xmlName: "x-ms-blob-sequence-number", + defaultValue: "userdelegationkey", + isConstant: true, + serializedName: "comp", type: { - name: "Number" - } - } + name: "String", + }, + }, }; -const contentType1 = { - parameterPath: ["options", "contentType"], +const restype1 = { + parameterPath: "restype", mapper: { - defaultValue: "application/octet-stream", + defaultValue: "account", isConstant: true, - serializedName: "Content-Type", + serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const body1 = { +const body = { parameterPath: "body", mapper: { serializedName: "body", required: true, xmlName: "body", type: { - name: "Stream" - } - } -}; -const accept2 = { - parameterPath: "accept", - mapper: { - defaultValue: "application/xml", - isConstant: true, - serializedName: "Accept", - type: { - 
name: "String" - } - } + name: "Stream", + }, + }, }; -const comp19 = { +const comp4 = { parameterPath: "comp", mapper: { - defaultValue: "page", + defaultValue: "batch", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const pageWrite = { - parameterPath: "pageWrite", +const contentLength = { + parameterPath: "contentLength", mapper: { - defaultValue: "update", - isConstant: true, - serializedName: "x-ms-page-write", + serializedName: "Content-Length", + required: true, + xmlName: "Content-Length", type: { - name: "String" - } - } + name: "Number", + }, + }, }; -const ifSequenceNumberLessThanOrEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThanOrEqualTo" - ], +const multipartContentType = { + parameterPath: "multipartContentType", mapper: { - serializedName: "x-ms-if-sequence-number-le", - xmlName: "x-ms-if-sequence-number-le", + serializedName: "Content-Type", + required: true, + xmlName: "Content-Type", type: { - name: "Number" - } - } + name: "String", + }, + }, }; -const ifSequenceNumberLessThan = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberLessThan" - ], +const comp5 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-if-sequence-number-lt", - xmlName: "x-ms-if-sequence-number-lt", + defaultValue: "blobs", + isConstant: true, + serializedName: "comp", type: { - name: "Number" - } - } + name: "String", + }, + }, }; -const ifSequenceNumberEqualTo = { - parameterPath: [ - "options", - "sequenceNumberAccessConditions", - "ifSequenceNumberEqualTo" - ], +const where = { + parameterPath: ["options", "where"], mapper: { - serializedName: "x-ms-if-sequence-number-eq", - xmlName: "x-ms-if-sequence-number-eq", + serializedName: "where", + xmlName: "where", type: { - name: "Number" - } - } + name: "String", + }, + }, }; -const pageWrite1 = { - parameterPath: "pageWrite", +const restype2 = { + 
parameterPath: "restype", mapper: { - defaultValue: "clear", + defaultValue: "container", isConstant: true, - serializedName: "x-ms-page-write", + serializedName: "restype", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const sourceUrl = { - parameterPath: "sourceUrl", +const metadata = { + parameterPath: ["options", "metadata"], mapper: { - serializedName: "x-ms-copy-source", - required: true, - xmlName: "x-ms-copy-source", + serializedName: "x-ms-meta", + xmlName: "x-ms-meta", + headerCollectionPrefix: "x-ms-meta-", type: { - name: "String" - } - } + name: "Dictionary", + value: { type: { name: "String" } }, + }, + }, }; -const sourceRange = { - parameterPath: "sourceRange", +const access = { + parameterPath: ["options", "access"], mapper: { - serializedName: "x-ms-source-range", - required: true, - xmlName: "x-ms-source-range", + serializedName: "x-ms-blob-public-access", + xmlName: "x-ms-blob-public-access", type: { - name: "String" - } - } + name: "Enum", + allowedValues: ["container", "blob"], + }, + }, }; -const sourceContentCrc64 = { - parameterPath: ["options", "sourceContentCrc64"], +const defaultEncryptionScope = { + parameterPath: [ + "options", + "containerEncryptionScope", + "defaultEncryptionScope", + ], mapper: { - serializedName: "x-ms-source-content-crc64", - xmlName: "x-ms-source-content-crc64", + serializedName: "x-ms-default-encryption-scope", + xmlName: "x-ms-default-encryption-scope", type: { - name: "ByteArray" - } - } + name: "String", + }, + }, }; -const range1 = { - parameterPath: "range", +const preventEncryptionScopeOverride = { + parameterPath: [ + "options", + "containerEncryptionScope", + "preventEncryptionScopeOverride", + ], mapper: { - serializedName: "x-ms-range", - required: true, - xmlName: "x-ms-range", + serializedName: "x-ms-deny-encryption-scope-override", + xmlName: "x-ms-deny-encryption-scope-override", type: { - name: "String" - } - } + name: "Boolean", + }, + }, }; -const comp20 = { - parameterPath: 
"comp", +const leaseId = { + parameterPath: ["options", "leaseAccessConditions", "leaseId"], mapper: { - defaultValue: "pagelist", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-lease-id", + xmlName: "x-ms-lease-id", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const prevsnapshot = { - parameterPath: ["options", "prevsnapshot"], +const ifModifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], mapper: { - serializedName: "prevsnapshot", - xmlName: "prevsnapshot", + serializedName: "If-Modified-Since", + xmlName: "If-Modified-Since", type: { - name: "String" - } - } + name: "DateTimeRfc1123", + }, + }, }; -const prevSnapshotUrl = { - parameterPath: ["options", "prevSnapshotUrl"], +const ifUnmodifiedSince = { + parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], mapper: { - serializedName: "x-ms-previous-snapshot-url", - xmlName: "x-ms-previous-snapshot-url", + serializedName: "If-Unmodified-Since", + xmlName: "If-Unmodified-Since", type: { - name: "String" - } - } + name: "DateTimeRfc1123", + }, + }, }; -const sequenceNumberAction = { - parameterPath: "sequenceNumberAction", +const comp6 = { + parameterPath: "comp", mapper: { - serializedName: "x-ms-sequence-number-action", - required: true, - xmlName: "x-ms-sequence-number-action", + defaultValue: "metadata", + isConstant: true, + serializedName: "comp", type: { - name: "Enum", - allowedValues: ["max", "update", "increment"] - } - } + name: "String", + }, + }, }; -const comp21 = { +const comp7 = { parameterPath: "comp", mapper: { - defaultValue: "incrementalcopy", + defaultValue: "acl", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const blobType1 = { - parameterPath: "blobType", +const containerAcl = { + parameterPath: ["options", "containerAcl"], mapper: { - defaultValue: "AppendBlob", - isConstant: true, - serializedName: "x-ms-blob-type", + 
serializedName: "containerAcl", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", type: { - name: "String" - } - } + name: "Sequence", + element: { + type: { + name: "Composite", + className: "SignedIdentifier", + }, + }, + }, + }, }; -const comp22 = { +const comp8 = { parameterPath: "comp", mapper: { - defaultValue: "appendblock", + defaultValue: "undelete", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } -}; -const maxSize = { - parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], - mapper: { - serializedName: "x-ms-blob-condition-maxsize", - xmlName: "x-ms-blob-condition-maxsize", - type: { - name: "Number" - } - } + name: "String", + }, + }, }; -const appendPosition = { - parameterPath: [ - "options", - "appendPositionAccessConditions", - "appendPosition" - ], +const deletedContainerName = { + parameterPath: ["options", "deletedContainerName"], mapper: { - serializedName: "x-ms-blob-condition-appendpos", - xmlName: "x-ms-blob-condition-appendpos", + serializedName: "x-ms-deleted-container-name", + xmlName: "x-ms-deleted-container-name", type: { - name: "Number" - } - } + name: "String", + }, + }, }; -const sourceRange1 = { - parameterPath: ["options", "sourceRange"], +const deletedContainerVersion = { + parameterPath: ["options", "deletedContainerVersion"], mapper: { - serializedName: "x-ms-source-range", - xmlName: "x-ms-source-range", + serializedName: "x-ms-deleted-container-version", + xmlName: "x-ms-deleted-container-version", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const comp23 = { +const comp9 = { parameterPath: "comp", mapper: { - defaultValue: "seal", + defaultValue: "rename", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const blobType2 = { - parameterPath: "blobType", +const sourceContainerName = { + parameterPath: "sourceContainerName", mapper: { - defaultValue: "BlockBlob", - 
isConstant: true, - serializedName: "x-ms-blob-type", + serializedName: "x-ms-source-container-name", + required: true, + xmlName: "x-ms-source-container-name", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const copySourceBlobProperties = { - parameterPath: ["options", "copySourceBlobProperties"], +const sourceLeaseId = { + parameterPath: ["options", "sourceLeaseId"], mapper: { - serializedName: "x-ms-copy-source-blob-properties", - xmlName: "x-ms-copy-source-blob-properties", + serializedName: "x-ms-source-lease-id", + xmlName: "x-ms-source-lease-id", type: { - name: "Boolean" - } - } + name: "String", + }, + }, }; -const comp24 = { +const comp10 = { parameterPath: "comp", mapper: { - defaultValue: "block", + defaultValue: "lease", isConstant: true, serializedName: "comp", type: { - name: "String" - } - } + name: "String", + }, + }, }; -const blockId = { - parameterPath: "blockId", +const action = { + parameterPath: "action", mapper: { - serializedName: "blockid", - required: true, - xmlName: "blockid", + defaultValue: "acquire", + isConstant: true, + serializedName: "x-ms-lease-action", type: { - name: "String" - } - } -}; -const blocks = { - parameterPath: "blocks", - mapper: BlockLookupList + name: "String", + }, + }, }; -const comp25 = { - parameterPath: "comp", +const duration = { + parameterPath: ["options", "duration"], mapper: { - defaultValue: "blocklist", - isConstant: true, - serializedName: "comp", + serializedName: "x-ms-lease-duration", + xmlName: "x-ms-lease-duration", type: { - name: "String" - } - } + name: "Number", + }, + }, }; -const listType = { - parameterPath: "listType", +const proposedLeaseId = { + parameterPath: ["options", "proposedLeaseId"], mapper: { - defaultValue: "committed", - serializedName: "blocklisttype", - required: true, - xmlName: "blocklisttype", + serializedName: "x-ms-proposed-lease-id", + xmlName: "x-ms-proposed-lease-id", type: { - name: "Enum", - allowedValues: ["committed", "uncommitted", "all"] - 
} - } -}; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a Service. */ -class Service { - /** - * Initialize a new instance of the class Service class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * Sets properties for a storage account's Blob service endpoint, including properties for Storage - * Analytics and CORS (Cross-Origin Resource Sharing) rules - * @param blobServiceProperties The StorageService properties. - * @param options The options parameters. - */ - setProperties(blobServiceProperties, options) { - const operationArguments = { - blobServiceProperties, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); - } - /** - * gets the properties of a storage account's Blob service, including properties for Storage Analytics - * and CORS (Cross-Origin Resource Sharing) rules. - * @param options The options parameters. - */ - getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); - } - /** - * Retrieves statistics related to replication for the Blob service. It is only available on the - * secondary location endpoint when read-access geo-redundant replication is enabled for the storage - * account. - * @param options The options parameters. 
- */ - getStatistics(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); - } - /** - * The List Containers Segment operation returns a list of the containers under the specified account - * @param options The options parameters. - */ - listContainersSegment(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); - } - /** - * Retrieves a user delegation key for the Blob service. This is only a valid operation when using - * bearer token authentication. - * @param keyInfo Key information - * @param options The options parameters. - */ - getUserDelegationKey(keyInfo, options) { - const operationArguments = { - keyInfo, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); - } - /** - * Returns the sku name and account kind - * @param options The options parameters. - */ - getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); - } - /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. 
- */ - submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); - } - /** - * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a - * given search expression. Filter blobs searches across all containers within a storage account but - * can be scoped within the expression to a single container. - * @param options The options parameters. - */ - filterBlobs(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); - } -} -// Operation Specifications -const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const setPropertiesOperationSpec = { - path: "/", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: ServiceSetPropertiesHeaders + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceSetPropertiesExceptionHeaders - } }, - requestBody: blobServiceProperties, - queryParameters: [ - restype, - comp, - timeoutInSeconds - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 }; -const getPropertiesOperationSpec$2 = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobServiceProperties, - headersMapper: ServiceGetPropertiesHeaders +const action1 = { + parameterPath: "action", + mapper: { + defaultValue: "release", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - 
headersMapper: ServiceGetPropertiesExceptionHeaders - } }, - queryParameters: [ - restype, - comp, - timeoutInSeconds - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 }; -const getStatisticsOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobServiceStatistics, - headersMapper: ServiceGetStatisticsHeaders +const leaseId1 = { + parameterPath: "leaseId", + mapper: { + serializedName: "x-ms-lease-id", + required: true, + xmlName: "x-ms-lease-id", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceGetStatisticsExceptionHeaders - } }, - queryParameters: [ - restype, - timeoutInSeconds, - comp1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 }; -const listContainersSegmentOperationSpec = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListContainersSegmentResponse, - headersMapper: ServiceListContainersSegmentHeaders +const action2 = { + parameterPath: "action", + mapper: { + defaultValue: "renew", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceListContainersSegmentExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - include - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 }; -const getUserDelegationKeyOperationSpec = { - path: "/", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: UserDelegationKey, - headersMapper: ServiceGetUserDelegationKeyHeaders +const action3 = { + parameterPath: "action", + mapper: { + defaultValue: "break", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", }, - default: { 
- bodyMapper: StorageError, - headersMapper: ServiceGetUserDelegationKeyExceptionHeaders - } }, - requestBody: keyInfo, - queryParameters: [ - restype, - timeoutInSeconds, - comp3 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 }; -const getAccountInfoOperationSpec$2 = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ServiceGetAccountInfoHeaders +const breakPeriod = { + parameterPath: ["options", "breakPeriod"], + mapper: { + serializedName: "x-ms-lease-break-period", + xmlName: "x-ms-lease-break-period", + type: { + name: "Number", }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceGetAccountInfoExceptionHeaders - } - }, - queryParameters: [comp, restype1], - urlParameters: [url], - headerParameters: [version, accept1], - isXML: true, - serializer: xmlSerializer$5 + }, }; -const submitBatchOperationSpec$1 = { - path: "/", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: ServiceSubmitBatchHeaders +const action4 = { + parameterPath: "action", + mapper: { + defaultValue: "change", + isConstant: true, + serializedName: "x-ms-lease-action", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceSubmitBatchExceptionHeaders - } }, - requestBody: body, - queryParameters: [timeoutInSeconds, comp4], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - contentLength, - multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$5 }; -const filterBlobsOperationSpec$1 = { - path: "/", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ServiceFilterBlobsHeaders +const 
proposedLeaseId1 = { + parameterPath: "proposedLeaseId", + mapper: { + serializedName: "x-ms-proposed-lease-id", + required: true, + xmlName: "x-ms-proposed-lease-id", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ServiceFilterBlobsExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$5 }; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a Container. */ -class Container { - /** - * Initialize a new instance of the class Container class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * creates a new container under the specified account. If the container with the same name already - * exists, the operation fails - * @param options The options parameters. - */ - create(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); - } - /** - * returns all user-defined metadata and system properties for the specified container. The data - * returned does not include the container's list of blobs - * @param options The options parameters. - */ - getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); - } - /** - * operation marks the specified container for deletion. 
The container and any blobs contained within - * it are later deleted during garbage collection - * @param options The options parameters. - */ - delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); - } - /** - * operation sets one or more user-defined name-value pairs for the specified container. - * @param options The options parameters. - */ - setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); - } - /** - * gets the permissions for the specified container. The permissions indicate whether container data - * may be accessed publicly. - * @param options The options parameters. - */ - getAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); - } - /** - * sets the permissions for the specified container. The permissions indicate whether blobs in a - * container may be accessed publicly. - * @param options The options parameters. - */ - setAccessPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); - } - /** - * Restores a previously-deleted container. - * @param options The options parameters. - */ - restore(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); - } - /** - * Renames an existing container. 
- * @param sourceContainerName Required. Specifies the name of the container to rename. - * @param options The options parameters. - */ - rename(sourceContainerName, options) { - const operationArguments = { - sourceContainerName, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renameOperationSpec); - } - /** - * The Batch operation allows multiple API calls to be embedded into a single HTTP request. - * @param contentLength The length of the request. - * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch - * boundary. Example header value: multipart/mixed; boundary=batch_ - * @param body Initial data - * @param options The options parameters. - */ - submitBatch(contentLength, multipartContentType, body, options) { - const operationArguments = { - contentLength, - multipartContentType, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); - } - /** - * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given - * search expression. Filter blobs searches within the given container. - * @param options The options parameters. - */ - filterBlobs(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param options The options parameters. 
- */ - acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param options The options parameters. - */ - breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); - } - /** - * [Update] establishes and manages a lock on a container for delete operations. The lock duration can - * be 15 to 60 seconds, or can be infinite - * @param leaseId Specifies the current lease ID on the resource. 
- * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); - } - /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param options The options parameters. - */ - listBlobFlatSegment(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); - } - /** - * [Update] The List Blobs operation returns a list of the blobs under the specified container - * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix - * element in the response body that acts as a placeholder for all blobs whose names begin with the - * same substring up to the appearance of the delimiter character. The delimiter may be a single - * character or a string. - * @param options The options parameters. - */ - listBlobHierarchySegment(delimiter, options) { - const operationArguments = { - delimiter, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); - } - /** - * Returns the sku name and account kind - * @param options The options parameters. 
- */ - getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); - } -} -// Operation Specifications -const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const createOperationSpec$2 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerCreateHeaders +const include1 = { + parameterPath: ["options", "include"], + mapper: { + serializedName: "include", + xmlName: "include", + xmlElementName: "ListBlobsIncludeItem", + type: { + name: "Sequence", + element: { + type: { + name: "Enum", + allowedValues: [ + "copy", + "deleted", + "metadata", + "snapshots", + "uncommittedblobs", + "versions", + "tags", + "immutabilitypolicy", + "legalhold", + "deletedwithversions", + ], + }, + }, }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerCreateExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - access, - defaultEncryptionScope, - preventEncryptionScopeOverride - ], - isXML: true, - serializer: xmlSerializer$4 + collectionFormat: "CSV", }; -const getPropertiesOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetPropertiesHeaders +const delimiter = { + parameterPath: "delimiter", + mapper: { + serializedName: "delimiter", + required: true, + xmlName: "delimiter", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetPropertiesExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId - ], - isXML: true, - serializer: xmlSerializer$4 }; -const deleteOperationSpec$1 = { - 
path: "/{containerName}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: ContainerDeleteHeaders +const snapshot = { + parameterPath: ["options", "snapshot"], + mapper: { + serializedName: "snapshot", + xmlName: "snapshot", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerDeleteExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, restype2], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince - ], - isXML: true, - serializer: xmlSerializer$4 }; -const setMetadataOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerSetMetadataHeaders +const versionId = { + parameterPath: ["options", "versionId"], + mapper: { + serializedName: "versionid", + xmlName: "versionid", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSetMetadataExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp6 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince - ], - isXML: true, - serializer: xmlSerializer$4 }; -const getAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { - name: "Sequence", - element: { - type: { name: "Composite", className: "SignedIdentifier" } - } - }, - serializedName: "SignedIdentifiers", - xmlName: "SignedIdentifiers", - xmlIsWrapped: true, - xmlElementName: "SignedIdentifier" - }, - headersMapper: ContainerGetAccessPolicyHeaders +const range = { + parameterPath: ["options", "range"], + mapper: { + serializedName: "x-ms-range", + xmlName: "x-ms-range", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccessPolicyExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - 
restype2, - comp7 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId - ], - isXML: true, - serializer: xmlSerializer$4 }; -const setAccessPolicyOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerSetAccessPolicyHeaders +const rangeGetContentMD5 = { + parameterPath: ["options", "rangeGetContentMD5"], + mapper: { + serializedName: "x-ms-range-get-content-md5", + xmlName: "x-ms-range-get-content-md5", + type: { + name: "Boolean", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSetAccessPolicyExceptionHeaders - } }, - requestBody: containerAcl, - queryParameters: [ - timeoutInSeconds, - restype2, - comp7 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - access, - leaseId, - ifModifiedSince, - ifUnmodifiedSince - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 }; -const restoreOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerRestoreHeaders +const rangeGetContentCRC64 = { + parameterPath: ["options", "rangeGetContentCRC64"], + mapper: { + serializedName: "x-ms-range-get-content-crc64", + xmlName: "x-ms-range-get-content-crc64", + type: { + name: "Boolean", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRestoreExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp8 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - deletedContainerName, - deletedContainerVersion - ], - isXML: true, - serializer: xmlSerializer$4 }; -const renameOperationSpec = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerRenameHeaders +const encryptionKey = { + parameterPath: ["options", "cpkInfo", "encryptionKey"], + mapper: { + serializedName: 
"x-ms-encryption-key", + xmlName: "x-ms-encryption-key", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRenameExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp9 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - sourceContainerName, - sourceLeaseId - ], - isXML: true, - serializer: xmlSerializer$4 }; -const submitBatchOperationSpec = { - path: "/{containerName}", - httpMethod: "POST", - responses: { - 202: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: ContainerSubmitBatchHeaders +const encryptionKeySha256 = { + parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], + mapper: { + serializedName: "x-ms-encryption-key-sha256", + xmlName: "x-ms-encryption-key-sha256", + type: { + name: "String", + }, + }, +}; +const encryptionAlgorithm = { + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], + mapper: { + serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", + type: { + name: "String", + }, + }, +}; +const ifMatch = { + parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], + mapper: { + serializedName: "If-Match", + xmlName: "If-Match", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerSubmitBatchExceptionHeaders - } }, - requestBody: body, - queryParameters: [ - timeoutInSeconds, - comp4, - restype2 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - contentLength, - multipartContentType - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$4 }; -const filterBlobsOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: FilterBlobSegment, - headersMapper: ContainerFilterBlobsHeaders +const ifNoneMatch = { + parameterPath: 
["options", "modifiedAccessConditions", "ifNoneMatch"], + mapper: { + serializedName: "If-None-Match", + xmlName: "If-None-Match", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerFilterBlobsExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - comp5, - where, - restype2 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 }; -const acquireLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: ContainerAcquireLeaseHeaders +const ifTags = { + parameterPath: ["options", "modifiedAccessConditions", "ifTags"], + mapper: { + serializedName: "x-ms-if-tags", + xmlName: "x-ms-if-tags", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerAcquireLeaseExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId - ], - isXML: true, - serializer: xmlSerializer$4 }; -const releaseLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerReleaseLeaseHeaders +const deleteSnapshots = { + parameterPath: ["options", "deleteSnapshots"], + mapper: { + serializedName: "x-ms-delete-snapshots", + xmlName: "x-ms-delete-snapshots", + type: { + name: "Enum", + allowedValues: ["include", "only"], }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerReleaseLeaseExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1 - ], - isXML: true, - serializer: xmlSerializer$4 }; -const 
renewLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerRenewLeaseHeaders +const blobDeleteType = { + parameterPath: ["options", "blobDeleteType"], + mapper: { + serializedName: "deletetype", + xmlName: "deletetype", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerRenewLeaseExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2 - ], - isXML: true, - serializer: xmlSerializer$4 }; -const breakLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: ContainerBreakLeaseHeaders +const comp11 = { + parameterPath: "comp", + mapper: { + defaultValue: "expiry", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerBreakLeaseExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod - ], - isXML: true, - serializer: xmlSerializer$4 }; -const changeLeaseOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: ContainerChangeLeaseHeaders +const expiryOptions = { + parameterPath: "expiryOptions", + mapper: { + serializedName: "x-ms-expiry-option", + required: true, + xmlName: "x-ms-expiry-option", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerChangeLeaseExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - restype2, - comp10 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - 
ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1 - ], - isXML: true, - serializer: xmlSerializer$4 }; -const listBlobFlatSegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListBlobsFlatSegmentResponse, - headersMapper: ContainerListBlobFlatSegmentHeaders +const expiresOn = { + parameterPath: ["options", "expiresOn"], + mapper: { + serializedName: "x-ms-expiry-time", + xmlName: "x-ms-expiry-time", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerListBlobFlatSegmentExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 }; -const listBlobHierarchySegmentOperationSpec = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: ListBlobsHierarchySegmentResponse, - headersMapper: ContainerListBlobHierarchySegmentHeaders +const blobCacheControl = { + parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], + mapper: { + serializedName: "x-ms-blob-cache-control", + xmlName: "x-ms-blob-cache-control", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - comp2, - prefix, - marker, - maxPageSize, - restype2, - include1, - delimiter - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$4 }; -const getAccountInfoOperationSpec$1 = { - path: "/{containerName}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: ContainerGetAccountInfoHeaders +const blobContentType = { + parameterPath: ["options", "blobHttpHeaders", "blobContentType"], + mapper: { + serializedName: "x-ms-blob-content-type", 
+ xmlName: "x-ms-blob-content-type", + type: { + name: "String", + }, + }, +}; +const blobContentMD5 = { + parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], + mapper: { + serializedName: "x-ms-blob-content-md5", + xmlName: "x-ms-blob-content-md5", + type: { + name: "ByteArray", + }, + }, +}; +const blobContentEncoding = { + parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], + mapper: { + serializedName: "x-ms-blob-content-encoding", + xmlName: "x-ms-blob-content-encoding", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: ContainerGetAccountInfoExceptionHeaders - } }, - queryParameters: [comp, restype1], - urlParameters: [url], - headerParameters: [version, accept1], - isXML: true, - serializer: xmlSerializer$4 }; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a Blob. */ -class Blob$1 { - /** - * Initialize a new instance of the class Blob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * The Download operation reads or downloads a blob from the system, including its metadata and - * properties. You can also call Download to read a snapshot. - * @param options The options parameters. - */ - download(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); - } - /** - * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system - * properties for the blob. It does not return the content of the blob. - * @param options The options parameters. 
- */ - getProperties(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); - } - /** - * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is - * permanently removed from the storage account. If the storage account's soft delete feature is - * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible - * immediately. However, the blob service retains the blob or snapshot for the number of days specified - * by the DeleteRetentionPolicy section of [Storage service properties] - * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is - * permanently removed from the storage account. Note that you continue to be charged for the - * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the - * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You - * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a - * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 - * (ResourceNotFound). - * @param options The options parameters. - */ - delete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); - } - /** - * Undelete a blob that was previously soft deleted - * @param options The options parameters. 
- */ - undelete(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); - } - /** - * Sets the time a blob will expire and be deleted. - * @param expiryOptions Required. Indicates mode of the expiry time - * @param options The options parameters. - */ - setExpiry(expiryOptions, options) { - const operationArguments = { - expiryOptions, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); - } - /** - * The Set HTTP Headers operation sets system properties on the blob - * @param options The options parameters. - */ - setHttpHeaders(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); - } - /** - * The Set Immutability Policy operation sets the immutability policy on the blob - * @param options The options parameters. - */ - setImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); - } - /** - * The Delete Immutability Policy operation deletes the immutability policy on the blob - * @param options The options parameters. - */ - deleteImmutabilityPolicy(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); - } - /** - * The Set Legal Hold operation sets a legal hold on the blob. - * @param legalHold Specified if a legal hold should be set on the blob. 
- * @param options The options parameters. - */ - setLegalHold(legalHold, options) { - const operationArguments = { - legalHold, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); - } - /** - * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more - * name-value pairs - * @param options The options parameters. - */ - setMetadata(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); - } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. - */ - acquireLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); - } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. - */ - releaseLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); - } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param options The options parameters. 
- */ - renewLease(leaseId, options) { - const operationArguments = { - leaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); - } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param leaseId Specifies the current lease ID on the resource. - * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 - * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor - * (String) for a list of valid GUID string formats. - * @param options The options parameters. - */ - changeLease(leaseId, proposedLeaseId, options) { - const operationArguments = { - leaseId, - proposedLeaseId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); - } - /** - * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete - * operations - * @param options The options parameters. - */ - breakLease(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); - } - /** - * The Create Snapshot operation creates a read-only snapshot of a blob - * @param options The options parameters. - */ - createSnapshot(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); - } - /** - * The Start Copy From URL operation copies a blob or an internet resource to a new blob. - * @param copySource Specifies the name of the source page blob snapshot. 
This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - startCopyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); - } - /** - * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return - * a response until the copy is complete. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - copyFromURL(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); - } - /** - * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination - * blob with zero length and full metadata. - * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob - * operation. - * @param options The options parameters. 
- */ - abortCopyFromURL(copyId, options) { - const operationArguments = { - copyId, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); - } - /** - * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium - * storage account and on a block blob in a blob storage account (locally redundant storage only). A - * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block - * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's - * ETag. - * @param tier Indicates the tier to be set on the blob. - * @param options The options parameters. - */ - setTier(tier, options) { - const operationArguments = { - tier, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); - } - /** - * Returns the sku name and account kind - * @param options The options parameters. - */ - getAccountInfo(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); - } - /** - * The Query operation enables users to select/project on blob data by providing simple query - * expressions. - * @param options The options parameters. - */ - query(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, queryOperationSpec); - } - /** - * The Get Tags operation enables users to get the tags associated with a blob. - * @param options The options parameters. 
- */ - getTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); - } - /** - * The Set Tags operation enables users to set tags on a blob. - * @param options The options parameters. - */ - setTags(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); - } -} -// Operation Specifications -const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const downloadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders +const blobContentLanguage = { + parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], + mapper: { + serializedName: "x-ms-blob-content-language", + xmlName: "x-ms-blob-content-language", + type: { + name: "String", }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobDownloadHeaders + }, +}; +const blobContentDisposition = { + parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], + mapper: { + serializedName: "x-ms-blob-content-disposition", + xmlName: "x-ms-blob-content-disposition", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDownloadExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - rangeGetContentMD5, - rangeGetContentCRC64, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - 
ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const getPropertiesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "HEAD", - responses: { - 200: { - headersMapper: BlobGetPropertiesHeaders +const comp12 = { + parameterPath: "comp", + mapper: { + defaultValue: "immutabilityPolicies", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetPropertiesExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const deleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 202: { - headersMapper: BlobDeleteHeaders +const immutabilityPolicyExpiry = { + parameterPath: ["options", "immutabilityPolicyExpiry"], + mapper: { + serializedName: "x-ms-immutability-policy-until-date", + xmlName: "x-ms-immutability-policy-until-date", + type: { + name: "DateTimeRfc1123", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - blobDeleteType - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - deleteSnapshots - ], - isXML: true, - serializer: xmlSerializer$3 }; -const undeleteOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobUndeleteHeaders +const immutabilityPolicyMode = { + parameterPath: ["options", "immutabilityPolicyMode"], + mapper: { + serializedName: "x-ms-immutability-policy-mode", + xmlName: 
"x-ms-immutability-policy-mode", + type: { + name: "Enum", + allowedValues: ["Mutable", "Unlocked", "Locked"], }, - default: { - bodyMapper: StorageError, - headersMapper: BlobUndeleteExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp8], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 }; -const setExpiryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetExpiryHeaders +const comp13 = { + parameterPath: "comp", + mapper: { + defaultValue: "legalhold", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetExpiryExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp11], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - expiryOptions, - expiresOn - ], - isXML: true, - serializer: xmlSerializer$3 }; -const setHttpHeadersOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetHttpHeadersHeaders +const legalHold = { + parameterPath: "legalHold", + mapper: { + serializedName: "x-ms-legal-hold", + required: true, + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetHttpHeadersExceptionHeaders - } }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition +}; +const encryptionScope = { + parameterPath: ["options", "encryptionScope"], + mapper: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String", + }, + }, 
+}; +const comp14 = { + parameterPath: "comp", + mapper: { + defaultValue: "snapshot", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const tier = { + parameterPath: ["options", "tier"], + mapper: { + serializedName: "x-ms-access-tier", + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], + }, + }, +}; +const rehydratePriority = { + parameterPath: ["options", "rehydratePriority"], + mapper: { + serializedName: "x-ms-rehydrate-priority", + xmlName: "x-ms-rehydrate-priority", + type: { + name: "Enum", + allowedValues: ["High", "Standard"], + }, + }, +}; +const sourceIfModifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfModifiedSince", + ], + mapper: { + serializedName: "x-ms-source-if-modified-since", + xmlName: "x-ms-source-if-modified-since", + type: { + name: "DateTimeRfc1123", + }, + }, +}; +const sourceIfUnmodifiedSince = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfUnmodifiedSince", ], - isXML: true, - serializer: xmlSerializer$3 + mapper: { + serializedName: "x-ms-source-if-unmodified-since", + xmlName: "x-ms-source-if-unmodified-since", + type: { + name: "DateTimeRfc1123", + }, + }, }; -const setImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetImmutabilityPolicyHeaders +const sourceIfMatch = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], + mapper: { + serializedName: "x-ms-source-if-match", + xmlName: "x-ms-source-if-match", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetImmutabilityPolicyExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp12], - urlParameters: [url], - headerParameters: 
[ - version, - requestId, - accept1, - ifUnmodifiedSince, - immutabilityPolicyExpiry, - immutabilityPolicyMode +}; +const sourceIfNoneMatch = { + parameterPath: [ + "options", + "sourceModifiedAccessConditions", + "sourceIfNoneMatch", ], - isXML: true, - serializer: xmlSerializer$3 + mapper: { + serializedName: "x-ms-source-if-none-match", + xmlName: "x-ms-source-if-none-match", + type: { + name: "String", + }, + }, }; -const deleteImmutabilityPolicyOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "DELETE", - responses: { - 200: { - headersMapper: BlobDeleteImmutabilityPolicyHeaders +const sourceIfTags = { + parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], + mapper: { + serializedName: "x-ms-source-if-tags", + xmlName: "x-ms-source-if-tags", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp12], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1 - ], - isXML: true, - serializer: xmlSerializer$3 }; -const setLegalHoldOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetLegalHoldHeaders +const copySource = { + parameterPath: "copySource", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetLegalHoldExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp13], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - legalHold - ], - isXML: true, - serializer: xmlSerializer$3 }; -const setMetadataOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetMetadataHeaders +const blobTagsString = { + parameterPath: ["options", "blobTagsString"], + mapper: { + 
serializedName: "x-ms-tags", + xmlName: "x-ms-tags", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetMetadataExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp6], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 }; -const acquireLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobAcquireLeaseHeaders +const sealBlob = { + parameterPath: ["options", "sealBlob"], + mapper: { + serializedName: "x-ms-seal-blob", + xmlName: "x-ms-seal-blob", + type: { + name: "Boolean", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAcquireLeaseExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action, - duration, - proposedLeaseId, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const releaseLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobReleaseLeaseHeaders +const legalHold1 = { + parameterPath: ["options", "legalHold"], + mapper: { + serializedName: "x-ms-legal-hold", + xmlName: "x-ms-legal-hold", + type: { + name: "Boolean", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobReleaseLeaseExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action1, - leaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const 
renewLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobRenewLeaseHeaders +const xMsRequiresSync = { + parameterPath: "xMsRequiresSync", + mapper: { + defaultValue: "true", + isConstant: true, + serializedName: "x-ms-requires-sync", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobRenewLeaseExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action2, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const changeLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobChangeLeaseHeaders +const sourceContentMD5 = { + parameterPath: ["options", "sourceContentMD5"], + mapper: { + serializedName: "x-ms-source-content-md5", + xmlName: "x-ms-source-content-md5", + type: { + name: "ByteArray", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobChangeLeaseExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - leaseId1, - action4, - proposedLeaseId1, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const breakLeaseOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobBreakLeaseHeaders +const copySourceAuthorization = { + parameterPath: ["options", "copySourceAuthorization"], + mapper: { + serializedName: "x-ms-copy-source-authorization", + xmlName: "x-ms-copy-source-authorization", + type: { + name: "String", + }, + }, +}; +const copySourceTags = { + parameterPath: ["options", "copySourceTags"], + mapper: { + serializedName: 
"x-ms-copy-source-tag-option", + xmlName: "x-ms-copy-source-tag-option", + type: { + name: "Enum", + allowedValues: ["REPLACE", "COPY"], }, - default: { - bodyMapper: StorageError, - headersMapper: BlobBreakLeaseExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp10], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - ifModifiedSince, - ifUnmodifiedSince, - action3, - breakPeriod, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const createSnapshotOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlobCreateSnapshotHeaders +const comp15 = { + parameterPath: "comp", + mapper: { + defaultValue: "copy", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCreateSnapshotExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp14], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope - ], - isXML: true, - serializer: xmlSerializer$3 }; -const startCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobStartCopyFromURLHeaders +const copyActionAbortConstant = { + parameterPath: "copyActionAbortConstant", + mapper: { + defaultValue: "abort", + isConstant: true, + serializedName: "x-ms-copy-action", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobStartCopyFromURLExceptionHeaders - } }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - 
immutabilityPolicyExpiry, - immutabilityPolicyMode, - tier, - rehydratePriority, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sealBlob, - legalHold1 - ], - isXML: true, - serializer: xmlSerializer$3 }; -const copyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: BlobCopyFromURLHeaders +const copyId = { + parameterPath: "copyId", + mapper: { + serializedName: "copyid", + required: true, + xmlName: "copyid", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobCopyFromURLExceptionHeaders - } }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - copySource, - blobTagsString, - legalHold1, - xMsRequiresSync, - sourceContentMD5, - copySourceAuthorization, - copySourceTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const abortCopyFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobAbortCopyFromURLHeaders +const comp16 = { + parameterPath: "comp", + mapper: { + defaultValue: "tier", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobAbortCopyFromURLExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - comp15, - copyId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - copyActionAbortConstant - ], - isXML: true, - serializer: xmlSerializer$3 }; -const setTierOperationSpec = { - path: 
"/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: BlobSetTierHeaders +const tier1 = { + parameterPath: "tier", + mapper: { + serializedName: "x-ms-access-tier", + required: true, + xmlName: "x-ms-access-tier", + type: { + name: "Enum", + allowedValues: [ + "P4", + "P6", + "P10", + "P15", + "P20", + "P30", + "P40", + "P50", + "P60", + "P70", + "P80", + "Hot", + "Cool", + "Archive", + "Cold", + ], }, - 202: { - headersMapper: BlobSetTierHeaders + }, +}; +const queryRequest = { + parameterPath: ["options", "queryRequest"], + mapper: QueryRequest, +}; +const comp17 = { + parameterPath: "comp", + mapper: { + defaultValue: "query", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTierExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp16 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags, - rehydratePriority, - tier1 - ], - isXML: true, - serializer: xmlSerializer$3 }; -const getAccountInfoOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - headersMapper: BlobGetAccountInfoHeaders +const comp18 = { + parameterPath: "comp", + mapper: { + defaultValue: "tags", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetAccountInfoExceptionHeaders - } }, - queryParameters: [comp, restype1], - urlParameters: [url], - headerParameters: [version, accept1], - isXML: true, - serializer: xmlSerializer$3 }; -const queryOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "POST", - responses: { - 200: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders +const tags = { + parameterPath: ["options", "tags"], + mapper: BlobTags, +}; +const 
transactionalContentMD5 = { + parameterPath: ["options", "transactionalContentMD5"], + mapper: { + serializedName: "Content-MD5", + xmlName: "Content-MD5", + type: { + name: "ByteArray", }, - 206: { - bodyMapper: { - type: { name: "Stream" }, - serializedName: "parsedResponse" - }, - headersMapper: BlobQueryHeaders + }, +}; +const transactionalContentCrc64 = { + parameterPath: ["options", "transactionalContentCrc64"], + mapper: { + serializedName: "x-ms-content-crc64", + xmlName: "x-ms-content-crc64", + type: { + name: "ByteArray", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobQueryExceptionHeaders - } }, - requestBody: queryRequest, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp17 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 }; -const getTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlobTags, - headersMapper: BlobGetTagsHeaders +const blobType = { + parameterPath: "blobType", + mapper: { + defaultValue: "PageBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobGetTagsExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - snapshot, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer$3 }; -const setTagsOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 204: { - headersMapper: BlobSetTagsHeaders +const blobContentLength = { + parameterPath: "blobContentLength", + mapper: { + 
serializedName: "x-ms-blob-content-length", + required: true, + xmlName: "x-ms-blob-content-length", + type: { + name: "Number", }, - default: { - bodyMapper: StorageError, - headersMapper: BlobSetTagsExceptionHeaders - } }, - requestBody: tags, - queryParameters: [ - timeoutInSeconds, - versionId, - comp18 - ], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - leaseId, - ifTags, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer$3 }; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a PageBlob. */ -class PageBlob { - /** - * Initialize a new instance of the class PageBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * The Create operation creates a new page blob. - * @param contentLength The length of the request. - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. - */ - create(contentLength, blobContentLength, options) { - const operationArguments = { - contentLength, - blobContentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); - } - /** - * The Upload Pages operation writes a range of pages to a page blob - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. 
- */ - uploadPages(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); - } - /** - * The Clear Pages operation clears a set of pages from a page blob - * @param contentLength The length of the request. - * @param options The options parameters. - */ - clearPages(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); - } - /** - * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a - * URL - * @param sourceUrl Specify a URL to the copy source. - * @param sourceRange Bytes of source data in the specified range. The length of this range should - * match the ContentLength header and x-ms-range/Range destination range header. - * @param contentLength The length of the request. - * @param range The range of bytes to which the source range would be written. The range should be 512 - * aligned and range-end is required. - * @param options The options parameters. - */ - uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { - const operationArguments = { - sourceUrl, - sourceRange, - contentLength, - range, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); - } - /** - * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a - * page blob - * @param options The options parameters. 
- */ - getPageRanges(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); - } - /** - * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were - * changed between target blob and previous snapshot. - * @param options The options parameters. - */ - getPageRangesDiff(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); - } - /** - * Resize the Blob - * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The - * page blob size must be aligned to a 512-byte boundary. - * @param options The options parameters. - */ - resize(blobContentLength, options) { - const operationArguments = { - blobContentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); - } - /** - * Update the sequence number of the blob - * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. - * This property applies to page blobs only. This property indicates how the service should modify the - * blob's sequence number - * @param options The options parameters. - */ - updateSequenceNumber(sequenceNumberAction, options) { - const operationArguments = { - sequenceNumberAction, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); - } - /** - * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. 
- * The snapshot is copied such that only the differential changes between the previously copied - * snapshot are transferred to the destination. The copied snapshots are complete copies of the - * original snapshot and can be read or copied from as usual. This API is supported since REST version - * 2016-05-31. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. - */ - copyIncremental(copySource, options) { - const operationArguments = { - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); - } -} -// Operation Specifications -const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$1 = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobCreateHeaders +const blobSequenceNumber = { + parameterPath: ["options", "blobSequenceNumber"], + mapper: { + defaultValue: 0, + serializedName: "x-ms-blob-sequence-number", + xmlName: "x-ms-blob-sequence-number", + type: { + name: "Number", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCreateExceptionHeaders - } }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - 
blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - blobType, - blobContentLength, - blobSequenceNumber - ], - isXML: true, - serializer: xmlSerializer$2 }; -const uploadPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesHeaders +const contentType1 = { + parameterPath: ["options", "contentType"], + mapper: { + defaultValue: "application/octet-stream", + isConstant: true, + serializedName: "Content-Type", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesExceptionHeaders - } }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo - ], - mediaType: "binary", - serializer: serializer$2 }; -const clearPagesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobClearPagesHeaders +const body1 = { + parameterPath: "body", + mapper: { + serializedName: "body", + required: true, + xmlName: "body", + type: { + name: "Stream", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobClearPagesExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - encryptionKey, - 
encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - pageWrite1 - ], - isXML: true, - serializer: xmlSerializer$2 }; -const uploadPagesFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: PageBlobUploadPagesFromURLHeaders +const accept2 = { + parameterPath: "accept", + mapper: { + defaultValue: "application/xml", + isConstant: true, + serializedName: "Accept", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUploadPagesFromURLExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp19], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - pageWrite, - ifSequenceNumberLessThanOrEqualTo, - ifSequenceNumberLessThan, - ifSequenceNumberEqualTo, - sourceUrl, - sourceRange, - sourceContentCrc64, - range1 - ], - isXML: true, - serializer: xmlSerializer$2 }; -const getPageRangesOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesHeaders +const comp19 = { + parameterPath: "comp", + mapper: { + defaultValue: "page", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20 - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, 
- leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags - ], - isXML: true, - serializer: xmlSerializer$2 }; -const getPageRangesDiffOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: PageList, - headersMapper: PageBlobGetPageRangesDiffHeaders +const pageWrite = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "update", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobGetPageRangesDiffExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - marker, - maxPageSize, - snapshot, - comp20, - prevsnapshot - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - range, - ifMatch, - ifNoneMatch, - ifTags, - prevSnapshotUrl +}; +const ifSequenceNumberLessThanOrEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThanOrEqualTo", ], - isXML: true, - serializer: xmlSerializer$2 + mapper: { + serializedName: "x-ms-if-sequence-number-le", + xmlName: "x-ms-if-sequence-number-le", + type: { + name: "Number", + }, + }, }; -const resizeOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobResizeHeaders +const ifSequenceNumberLessThan = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberLessThan", + ], + mapper: { + serializedName: "x-ms-if-sequence-number-lt", + xmlName: "x-ms-if-sequence-number-lt", + type: { + name: "Number", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobResizeExceptionHeaders - } }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - 
encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - blobContentLength +}; +const ifSequenceNumberEqualTo = { + parameterPath: [ + "options", + "sequenceNumberAccessConditions", + "ifSequenceNumberEqualTo", ], - isXML: true, - serializer: xmlSerializer$2 + mapper: { + serializedName: "x-ms-if-sequence-number-eq", + xmlName: "x-ms-if-sequence-number-eq", + type: { + name: "Number", + }, + }, +}; +const pageWrite1 = { + parameterPath: "pageWrite", + mapper: { + defaultValue: "clear", + isConstant: true, + serializedName: "x-ms-page-write", + type: { + name: "String", + }, + }, }; -const updateSequenceNumberOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: PageBlobUpdateSequenceNumberHeaders +const sourceUrl = { + parameterPath: "sourceUrl", + mapper: { + serializedName: "x-ms-copy-source", + required: true, + xmlName: "x-ms-copy-source", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders - } }, - queryParameters: [comp, timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - blobSequenceNumber, - sequenceNumberAction - ], - isXML: true, - serializer: xmlSerializer$2 }; -const copyIncrementalOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 202: { - headersMapper: PageBlobCopyIncrementalHeaders +const sourceRange = { + parameterPath: "sourceRange", + mapper: { + serializedName: "x-ms-source-range", + required: true, + xmlName: "x-ms-source-range", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: PageBlobCopyIncrementalExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp21], - urlParameters: [url], - headerParameters: [ - version, - requestId, - 
accept1, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - ifTags, - copySource - ], - isXML: true, - serializer: xmlSerializer$2 }; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a AppendBlob. */ -class AppendBlob { - /** - * Initialize a new instance of the class AppendBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * The Create Append Blob operation creates a new append blob. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - create(contentLength, options) { - const operationArguments = { - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); - } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob. The - * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to - * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - appendBlock(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); - } - /** - * The Append Block operation commits a new block of data to the end of an existing append blob where - * the contents are read from a source url. 
The Append Block operation is permitted only if the blob - * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version - * 2015-02-21 version or later. - * @param sourceUrl Specify a URL to the copy source. - * @param contentLength The length of the request. - * @param options The options parameters. - */ - appendBlockFromUrl(sourceUrl, contentLength, options) { - const operationArguments = { - sourceUrl, - contentLength, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); - } - /** - * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version - * 2019-12-12 version or later. - * @param options The options parameters. - */ - seal(options) { - const operationArguments = { - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, sealOperationSpec); - } -} -// Operation Specifications -const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); -const createOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobCreateHeaders +const sourceContentCrc64 = { + parameterPath: ["options", "sourceContentCrc64"], + mapper: { + serializedName: "x-ms-source-content-crc64", + xmlName: "x-ms-source-content-crc64", + type: { + name: "ByteArray", }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobCreateExceptionHeaders - } }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - 
ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - blobTagsString, - legalHold1, - blobType1 - ], - isXML: true, - serializer: xmlSerializer$1 }; -const appendBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockHeaders +const range1 = { + parameterPath: "range", + mapper: { + serializedName: "x-ms-range", + required: true, + xmlName: "x-ms-range", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockExceptionHeaders - } }, - requestBody: body1, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - maxSize, - appendPosition - ], - mediaType: "binary", - serializer: serializer$1 }; -const appendBlockFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: AppendBlobAppendBlockFromUrlHeaders +const comp20 = { + parameterPath: "comp", + mapper: { + defaultValue: "pagelist", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const prevsnapshot = { + parameterPath: ["options", "prevsnapshot"], + mapper: { + serializedName: "prevsnapshot", + xmlName: "prevsnapshot", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp22], - urlParameters: [url], - headerParameters: [ - version, - 
requestId, - accept1, - contentLength, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - transactionalContentMD5, - sourceUrl, - sourceContentCrc64, - maxSize, - appendPosition, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer$1 }; -const sealOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 200: { - headersMapper: AppendBlobSealHeaders +const prevSnapshotUrl = { + parameterPath: ["options", "prevSnapshotUrl"], + mapper: { + serializedName: "x-ms-previous-snapshot-url", + xmlName: "x-ms-previous-snapshot-url", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: AppendBlobSealExceptionHeaders - } }, - queryParameters: [timeoutInSeconds, comp23], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - ifMatch, - ifNoneMatch, - appendPosition - ], - isXML: true, - serializer: xmlSerializer$1 }; - -/* - * Copyright (c) Microsoft Corporation. - * Licensed under the MIT License. - * - * Code generated by Microsoft (R) AutoRest Code Generator. - * Changes may cause incorrect behavior and will be lost if the code is regenerated. - */ -/** Class representing a BlockBlob. */ -class BlockBlob { - /** - * Initialize a new instance of the class BlockBlob class. - * @param client Reference to the service client - */ - constructor(client) { - this.client = client; - } - /** - * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing - * block blob overwrites any existing metadata on the blob. 
Partial updates are not supported with Put - * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a - * partial update of the content of a block blob, use the Put Block List operation. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - upload(contentLength, body, options) { - const operationArguments = { - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); - } - /** - * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read - * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are - * not supported with Put Blob from URL; the content of an existing blob is overwritten with the - * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, - * use the Put Block from URL API in conjunction with Put Block List. - * @param contentLength The length of the request. - * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to - * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would - * appear in a request URI. The source blob must either be public or must be authenticated via a shared - * access signature. - * @param options The options parameters. 
- */ - putBlobFromUrl(contentLength, copySource, options) { - const operationArguments = { - contentLength, - copySource, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); - } - /** - * The Stage Block operation creates a new block to be committed as part of a blob - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param body Initial data - * @param options The options parameters. - */ - stageBlock(blockId, contentLength, body, options) { - const operationArguments = { - blockId, - contentLength, - body, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); - } - /** - * The Stage Block operation creates a new block to be committed as part of a blob where the contents - * are read from a URL. - * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string - * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified - * for the blockid parameter must be the same size for each block. - * @param contentLength The length of the request. - * @param sourceUrl Specify a URL to the copy source. - * @param options The options parameters. 
- */ - stageBlockFromURL(blockId, contentLength, sourceUrl, options) { - const operationArguments = { - blockId, - contentLength, - sourceUrl, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); - } - /** - * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the - * blob. In order to be written as part of a blob, a block must have been successfully written to the - * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading - * only those blocks that have changed, then committing the new and existing blocks together. You can - * do this by specifying whether to commit a block from the committed block list or from the - * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list - * it may belong to. - * @param blocks Blob Blocks. - * @param options The options parameters. - */ - commitBlockList(blocks, options) { - const operationArguments = { - blocks, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); - } - /** - * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block - * blob - * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted - * blocks, or both lists together. - * @param options The options parameters. 
- */ - getBlockList(listType, options) { - const operationArguments = { - listType, - options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) - }; - return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); - } -} -// Operation Specifications -const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); -const uploadOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobUploadHeaders +const sequenceNumberAction = { + parameterPath: "sequenceNumberAction", + mapper: { + serializedName: "x-ms-sequence-number-action", + required: true, + xmlName: "x-ms-sequence-number-action", + type: { + name: "Enum", + allowedValues: ["max", "update", "increment"], }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobUploadExceptionHeaders - } }, - requestBody: body1, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2, - blobType2 - ], - mediaType: "binary", - serializer }; -const putBlobFromUrlOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobPutBlobFromUrlHeaders +const comp21 = { + parameterPath: "comp", + mapper: { + defaultValue: "incrementalcopy", + isConstant: true, + serializedName: "comp", + type: { + name: 
"String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders - } }, - queryParameters: [timeoutInSeconds], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - encryptionScope, - tier, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceIfTags, - copySource, - blobTagsString, - sourceContentMD5, - copySourceAuthorization, - copySourceTags, - transactionalContentMD5, - blobType2, - copySourceBlobProperties +}; +const blobType1 = { + parameterPath: "blobType", + mapper: { + defaultValue: "AppendBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", + }, + }, +}; +const comp22 = { + parameterPath: "comp", + mapper: { + defaultValue: "appendblock", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const maxSize = { + parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], + mapper: { + serializedName: "x-ms-blob-condition-maxsize", + xmlName: "x-ms-blob-condition-maxsize", + type: { + name: "Number", + }, + }, +}; +const appendPosition = { + parameterPath: [ + "options", + "appendPositionAccessConditions", + "appendPosition", ], - isXML: true, - serializer: xmlSerializer + mapper: { + serializedName: "x-ms-blob-condition-appendpos", + xmlName: "x-ms-blob-condition-appendpos", + type: { + name: "Number", + }, + }, }; -const stageBlockOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockHeaders +const sourceRange1 = { + parameterPath: ["options", "sourceRange"], + mapper: { + 
serializedName: "x-ms-source-range", + xmlName: "x-ms-source-range", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockExceptionHeaders - } }, - requestBody: body1, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - transactionalContentMD5, - transactionalContentCrc64, - contentType1, - accept2 - ], - mediaType: "binary", - serializer }; -const stageBlockFromURLOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobStageBlockFromURLHeaders +const comp23 = { + parameterPath: "comp", + mapper: { + defaultValue: "seal", + isConstant: true, + serializedName: "comp", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobStageBlockFromURLExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - comp24, - blockId - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - contentLength, - leaseId, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - encryptionScope, - sourceIfModifiedSince, - sourceIfUnmodifiedSince, - sourceIfMatch, - sourceIfNoneMatch, - sourceContentMD5, - copySourceAuthorization, - sourceUrl, - sourceContentCrc64, - sourceRange1 - ], - isXML: true, - serializer: xmlSerializer }; -const commitBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "PUT", - responses: { - 201: { - headersMapper: BlockBlobCommitBlockListHeaders +const blobType2 = { + parameterPath: "blobType", + mapper: { + defaultValue: "BlockBlob", + isConstant: true, + serializedName: "x-ms-blob-type", + type: { + name: "String", }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobCommitBlockListExceptionHeaders - } }, - requestBody: 
blocks, - queryParameters: [timeoutInSeconds, comp25], - urlParameters: [url], - headerParameters: [ - contentType, - accept, - version, - requestId, - metadata, - leaseId, - ifModifiedSince, - ifUnmodifiedSince, - encryptionKey, - encryptionKeySha256, - encryptionAlgorithm, - ifMatch, - ifNoneMatch, - ifTags, - blobCacheControl, - blobContentType, - blobContentMD5, - blobContentEncoding, - blobContentLanguage, - blobContentDisposition, - immutabilityPolicyExpiry, - immutabilityPolicyMode, - encryptionScope, - tier, - blobTagsString, - legalHold1, - transactionalContentMD5, - transactionalContentCrc64 - ], - isXML: true, - contentType: "application/xml; charset=utf-8", - mediaType: "xml", - serializer: xmlSerializer }; -const getBlockListOperationSpec = { - path: "/{containerName}/{blob}", - httpMethod: "GET", - responses: { - 200: { - bodyMapper: BlockList, - headersMapper: BlockBlobGetBlockListHeaders +const copySourceBlobProperties = { + parameterPath: ["options", "copySourceBlobProperties"], + mapper: { + serializedName: "x-ms-copy-source-blob-properties", + xmlName: "x-ms-copy-source-blob-properties", + type: { + name: "Boolean", }, - default: { - bodyMapper: StorageError, - headersMapper: BlockBlobGetBlockListExceptionHeaders - } }, - queryParameters: [ - timeoutInSeconds, - snapshot, - comp25, - listType - ], - urlParameters: [url], - headerParameters: [ - version, - requestId, - accept1, - leaseId, - ifTags - ], - isXML: true, - serializer: xmlSerializer }; - -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - */ -const logger = logger$1.createClientLogger("storage-blob"); - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. 
-const SDK_VERSION = "12.17.0"; -const SERVICE_VERSION = "2023-11-03"; -const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB -const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB -const BLOCK_BLOB_MAX_BLOCKS = 50000; -const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB -const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB -const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; -const REQUEST_TIMEOUT = 100 * 1000; // In ms -/** - * The OAuth scope to use with Azure Storage. - */ -const StorageOAuthScopes = "https://storage.azure.com/.default"; -const URLConstants = { - Parameters: { - FORCE_BROWSER_NO_CACHE: "_", - SIGNATURE: "sig", - SNAPSHOT: "snapshot", - VERSIONID: "versionid", - TIMEOUT: "timeout", +const comp24 = { + parameterPath: "comp", + mapper: { + defaultValue: "block", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, }, }; -const HTTPURLConnection = { - HTTP_ACCEPTED: 202, - HTTP_CONFLICT: 409, - HTTP_NOT_FOUND: 404, - HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416, +const blockId = { + parameterPath: "blockId", + mapper: { + serializedName: "blockid", + required: true, + xmlName: "blockid", + type: { + name: "String", + }, + }, }; -const HeaderConstants = { - AUTHORIZATION: "Authorization", - AUTHORIZATION_SCHEME: "Bearer", - CONTENT_ENCODING: "Content-Encoding", - CONTENT_ID: "Content-ID", - CONTENT_LANGUAGE: "Content-Language", - CONTENT_LENGTH: "Content-Length", - CONTENT_MD5: "Content-Md5", - CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", - CONTENT_TYPE: "Content-Type", - COOKIE: "Cookie", - DATE: "date", - IF_MATCH: "if-match", - IF_MODIFIED_SINCE: "if-modified-since", - IF_NONE_MATCH: "if-none-match", - IF_UNMODIFIED_SINCE: "if-unmodified-since", - PREFIX_FOR_STORAGE: "x-ms-", - RANGE: "Range", - USER_AGENT: "User-Agent", - X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", - X_MS_COPY_SOURCE: "x-ms-copy-source", - X_MS_DATE: "x-ms-date", - 
X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version", +const blocks = { + parameterPath: "blocks", + mapper: BlockLookupList, +}; +const comp25 = { + parameterPath: "comp", + mapper: { + defaultValue: "blocklist", + isConstant: true, + serializedName: "comp", + type: { + name: "String", + }, + }, +}; +const listType = { + parameterPath: "listType", + mapper: { + defaultValue: "committed", + serializedName: "blocklisttype", + required: true, + xmlName: "blocklisttype", + type: { + name: "Enum", + allowedValues: ["committed", "uncommitted", "all"], + }, + }, }; -const ETagNone = ""; -const ETagAny = "*"; -const SIZE_1_MB = 1 * 1024 * 1024; -const BATCH_MAX_REQUEST = 256; -const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; -const HTTP_LINE_ENDING = "\r\n"; -const HTTP_VERSION_1_1 = "HTTP/1.1"; -const EncryptionAlgorithmAES25 = "AES256"; -const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; -const StorageBlobLoggingAllowedHeaderNames = [ - "Access-Control-Allow-Origin", - "Cache-Control", - "Content-Length", - "Content-Type", - "Date", - "Request-Id", - "traceparent", - "Transfer-Encoding", - "User-Agent", - "x-ms-client-request-id", - "x-ms-date", - "x-ms-error-code", - "x-ms-request-id", - "x-ms-return-client-request-id", - "x-ms-version", - "Accept-Ranges", - "Content-Disposition", - "Content-Encoding", - "Content-Language", - "Content-MD5", - "Content-Range", - "ETag", - "Last-Modified", - "Server", - "Vary", - "x-ms-content-crc64", - "x-ms-copy-action", - "x-ms-copy-completion-time", - "x-ms-copy-id", - "x-ms-copy-progress", - "x-ms-copy-status", - "x-ms-has-immutability-policy", - "x-ms-has-legal-hold", - "x-ms-lease-state", - "x-ms-lease-status", - "x-ms-range", - "x-ms-request-server-encrypted", - "x-ms-server-encrypted", - "x-ms-snapshot", - 
"x-ms-source-range", - "If-Match", - "If-Modified-Since", - "If-None-Match", - "If-Unmodified-Since", - "x-ms-access-tier", - "x-ms-access-tier-change-time", - "x-ms-access-tier-inferred", - "x-ms-account-kind", - "x-ms-archive-status", - "x-ms-blob-append-offset", - "x-ms-blob-cache-control", - "x-ms-blob-committed-block-count", - "x-ms-blob-condition-appendpos", - "x-ms-blob-condition-maxsize", - "x-ms-blob-content-disposition", - "x-ms-blob-content-encoding", - "x-ms-blob-content-language", - "x-ms-blob-content-length", - "x-ms-blob-content-md5", - "x-ms-blob-content-type", - "x-ms-blob-public-access", - "x-ms-blob-sequence-number", - "x-ms-blob-type", - "x-ms-copy-destination-snapshot", - "x-ms-creation-time", - "x-ms-default-encryption-scope", - "x-ms-delete-snapshots", - "x-ms-delete-type-permanent", - "x-ms-deny-encryption-scope-override", - "x-ms-encryption-algorithm", - "x-ms-if-sequence-number-eq", - "x-ms-if-sequence-number-le", - "x-ms-if-sequence-number-lt", - "x-ms-incremental-copy", - "x-ms-lease-action", - "x-ms-lease-break-period", - "x-ms-lease-duration", - "x-ms-lease-id", - "x-ms-lease-time", - "x-ms-page-write", - "x-ms-proposed-lease-id", - "x-ms-range-get-content-md5", - "x-ms-rehydrate-priority", - "x-ms-sequence-number-action", - "x-ms-sku-name", - "x-ms-source-content-md5", - "x-ms-source-if-match", - "x-ms-source-if-modified-since", - "x-ms-source-if-none-match", - "x-ms-source-if-unmodified-since", - "x-ms-tag-count", - "x-ms-encryption-key-sha256", - "x-ms-if-tags", - "x-ms-source-if-tags", -]; -const StorageBlobLoggingAllowedQueryParameters = [ - "comp", - "maxresults", - "rscc", - "rscd", - "rsce", - "rscl", - "rsct", - "se", - "si", - "sip", - "sp", - "spr", - "sr", - "srt", - "ss", - "st", - "sv", - "include", - "marker", - "prefix", - "copyid", - "restype", - "blockid", - "blocklisttype", - "delimiter", - "prevsnapshot", - "ske", - "skoid", - "sks", - "skt", - "sktid", - "skv", - "snapshot", -]; -const 
BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; -const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; -/// List of ports used for path style addressing. -/// Path style addressing means that storage account is put in URI's Path segment in instead of in host. -const PathStylePorts = [ - "10000", - "10001", - "10002", - "10003", - "10004", - "10100", - "10101", - "10102", - "10103", - "10104", - "11000", - "11001", - "11002", - "11003", - "11004", - "11100", - "11101", - "11102", - "11103", - "11104", -]; -// Copyright (c) Microsoft Corporation. -/** - * Reserved URL characters must be properly escaped for Storage services like Blob or File. - * - * ## URL encode and escape strategy for JS SDKs - * - * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. - * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL - * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. - * - * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. - * - * This is what legacy V2 SDK does, simple and works for most of the cases. - * - When customer URL string is "http://account.blob.core.windows.net/con/b:", - * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", - * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. - * - * But this strategy will make it not possible to create a blob with "?" in it's name. 
Because when customer URL string is - * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. - * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. - * V2 SDK doesn't have this issue because it doesn't allow customer pass in a full URL, it accepts a separate blob name and encodeURIComponent for it. - * We cannot accept a SDK cannot create a blob name with "?". So we implement strategy two: - * - * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters. - * - * This is what V10 Blob Go SDK does. It accepts a URL type in Go, and call url.EscapedPath() to escape the special chars unescaped. - * - When customer URL string is "http://account.blob.core.windows.net/con/b:", - * SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", - * There is no special characters, so send "http://account.blob.core.windows.net/con/b%3A" to server. A blob named "b:" will be created. - * - When customer URL string is "http://account.blob.core.windows.net/con/b%253A", - * There is no special characters, so send "http://account.blob.core.windows.net/con/b%253A" to server. A blob named "b%3A" will be created. - * - * This strategy gives us flexibility to create with any special characters. But "%" will be treated as a special characters, if the URL string - * is not encoded, there shouldn't a "%" in the URL string, otherwise the URL is not a valid URL. - * If customer needs to create a blob with "%" in it's blob name, use "%25" instead of "%". Just like above 3rd sample. 
- * And following URL strings are invalid: - * - "http://account.blob.core.windows.net/con/b%" - * - "http://account.blob.core.windows.net/con/b%2" - * - "http://account.blob.core.windows.net/con/b%G" - * - * Another special character is "?", use "%2F" to represent a blob name with "?" in a URL string. - * - * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)` - * - * We will apply strategy one, and call encodeURIComponent for these parameters like blobName. Because what customers passes in is a plain name instead of a URL. - * - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. * - * @param url - + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -function escapeURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path || "/"; - path = escape(path); - urlParsed.setPath(path); - return urlParsed.toString(); -} -function getProxyUriFromDevConnString(connectionString) { - // Development Connection String - // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key - let proxyUri = ""; - if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { - // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri - const matchCredentials = connectionString.split(";"); - for (const element of matchCredentials) { - if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { - proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; - } - } +/** Class containing Service operations. */ +class ServiceImpl { + /** + * Initialize a new instance of the class Service class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; + } + /** + * Sets properties for a storage account's Blob service endpoint, including properties for Storage + * Analytics and CORS (Cross-Origin Resource Sharing) rules + * @param blobServiceProperties The StorageService properties. + * @param options The options parameters. 
+ */ + setProperties(blobServiceProperties, options) { + return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); } - return proxyUri; -} -function getValueInConnString(connectionString, argument) { - const elements = connectionString.split(";"); - for (const element of elements) { - if (element.trim().startsWith(argument)) { - return element.trim().match(argument + "=(.*)")[1]; - } + /** + * gets the properties of a storage account's Blob service, including properties for Storage Analytics + * and CORS (Cross-Origin Resource Sharing) rules. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); + } + /** + * Retrieves statistics related to replication for the Blob service. It is only available on the + * secondary location endpoint when read-access geo-redundant replication is enabled for the storage + * account. + * @param options The options parameters. + */ + getStatistics(options) { + return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); + } + /** + * The List Containers Segment operation returns a list of the containers under the specified account + * @param options The options parameters. + */ + listContainersSegment(options) { + return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); + } + /** + * Retrieves a user delegation key for the Blob service. This is only a valid operation when using + * bearer token authentication. + * @param keyInfo Key information + * @param options The options parameters. + */ + getUserDelegationKey(keyInfo, options) { + return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. 
+ */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1); + } + /** + * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a + * given search expression. Filter blobs searches across all containers within a storage account but + * can be scoped within the expression to a single container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); } - return ""; } -/** - * Extracts the parts of an Azure Storage account connection string. 
+// Operation Specifications +const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const setPropertiesOperationSpec = { + path: "/", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ServiceSetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSetPropertiesExceptionHeaders, + }, + }, + requestBody: blobServiceProperties, + queryParameters: [ + restype, + comp, + timeoutInSeconds, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const getPropertiesOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceProperties, + headersMapper: ServiceGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [ + restype, + comp, + timeoutInSeconds, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const getStatisticsOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobServiceStatistics, + headersMapper: ServiceGetStatisticsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetStatisticsExceptionHeaders, + }, + }, + queryParameters: [ + restype, + timeoutInSeconds, + comp1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const listContainersSegmentOperationSpec = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListContainersSegmentResponse, + headersMapper: ServiceListContainersSegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceListContainersSegmentExceptionHeaders, + }, + 
}, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + include, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; +const getUserDelegationKeyOperationSpec = { + path: "/", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: UserDelegationKey, + headersMapper: ServiceGetUserDelegationKeyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetUserDelegationKeyExceptionHeaders, + }, + }, + requestBody: keyInfo, + queryParameters: [ + restype, + timeoutInSeconds, + comp3, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const getAccountInfoOperationSpec$2 = { + path: "/", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ServiceGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$5, +}; +const submitBatchOperationSpec$1 = { + path: "/", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: ServiceSubmitBatchHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceSubmitBatchExceptionHeaders, + }, + }, + requestBody: body, + queryParameters: [timeoutInSeconds, comp4], + urlParameters: [url], + headerParameters: [ + accept, + version, + requestId, + contentLength, + multipartContentType, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$5, +}; +const filterBlobsOperationSpec$1 = { + path: "/", + httpMethod: "GET", + responses: { 
+ 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ServiceFilterBlobsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ServiceFilterBlobsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$5, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. * - * @param connectionString - Connection string. - * @returns String key value pairs of the storage account's url and credentials. + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -function extractConnectionStringParts(connectionString) { - let proxyUri = ""; - if (connectionString.startsWith("UseDevelopmentStorage=true")) { - // Development connection string - proxyUri = getProxyUriFromDevConnString(connectionString); - connectionString = DevelopmentConnectionString; +/** Class containing Container operations. */ +class ContainerImpl { + /** + * Initialize a new instance of the class Container class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - // Matching BlobEndpoint in the Account connection string - let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); - // Slicing off '/' at the end if exists - // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) - blobEndpoint = blobEndpoint.endsWith("/") ? 
blobEndpoint.slice(0, -1) : blobEndpoint; - if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && - connectionString.search("AccountKey=") !== -1) { - // Account connection string - let defaultEndpointsProtocol = ""; - let accountName = ""; - let accountKey = Buffer.from("accountKey", "base64"); - let endpointSuffix = ""; - // Get account name and key - accountName = getValueInConnString(connectionString, "AccountName"); - accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); - if (!blobEndpoint) { - // BlobEndpoint is not present in the Account connection string - // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` - defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); - const protocol = defaultEndpointsProtocol.toLowerCase(); - if (protocol !== "https" && protocol !== "http") { - throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. Expecting 'https' or 'http'"); - } - endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); - if (!endpointSuffix) { - throw new Error("Invalid EndpointSuffix in the provided Connection String"); - } - blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - } - if (!accountName) { - throw new Error("Invalid AccountName in the provided Connection String"); - } - else if (accountKey.length === 0) { - throw new Error("Invalid AccountKey in the provided Connection String"); - } - return { - kind: "AccountConnString", - url: blobEndpoint, - accountName, - accountKey, - proxyUri, - }; + /** + * creates a new container under the specified account. If the container with the same name already + * exists, the operation fails + * @param options The options parameters. 
+ */ + create(options) { + return this.client.sendOperationRequest({ options }, createOperationSpec$2); } - else { - // SAS connection string - const accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); - let accountName = getValueInConnString(connectionString, "AccountName"); - // if accountName is empty, try to read it from BlobEndpoint - if (!accountName) { - accountName = getAccountNameFromUrl(blobEndpoint); - } - if (!blobEndpoint) { - throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); - } - else if (!accountSas) { - throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); - } - return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; + /** + * returns all user-defined metadata and system properties for the specified container. The data + * returned does not include the container's list of blobs + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); } -} -/** - * Internal escape method implemented Strategy Two mentioned in escapeURL() description. - * - * @param text - - */ -function escape(text) { - return encodeURIComponent(text) - .replace(/%2F/g, "/") // Don't escape for "/" - .replace(/'/g, "%27") // Escape for "'" - .replace(/\+/g, "%20") - .replace(/%25/g, "%"); // Revert encoded "%" -} -/** - * Append a string to URL path. Will remove duplicated "/" in front of the string - * when URL path ends with a "/". - * - * @param url - Source URL string - * @param name - String to be appended to URL - * @returns An updated URL string - */ -function appendToURLPath(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let path = urlParsed.getPath(); - path = path ? (path.endsWith("/") ? 
`${path}${name}` : `${path}/${name}`) : name; - urlParsed.setPath(path); - const normalizedUrl = new URL(urlParsed.toString()); - return normalizedUrl.toString(); -} -/** - * Set URL parameter name and value. If name exists in URL parameters, old value - * will be replaced by name key. If not provide value, the parameter will be deleted. - * - * @param url - Source URL string - * @param name - Parameter name - * @param value - Parameter value - * @returns An updated URL string - */ -function setURLParameter(url, name, value) { - const urlParsed = coreHttp.URLBuilder.parse(url); - urlParsed.setQueryParameter(name, value); - return urlParsed.toString(); -} -/** - * Get URL parameter by name. - * - * @param url - - * @param name - - */ -function getURLParameter(url, name) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getQueryParameterValue(name); -} -/** - * Set URL host. - * - * @param url - Source URL string - * @param host - New host string - * @returns An updated URL string - */ -function setURLHost(url, host) { - const urlParsed = coreHttp.URLBuilder.parse(url); - urlParsed.setHost(host); - return urlParsed.toString(); -} -/** - * Get URL path from an URL string. - * - * @param url - Source URL string - */ -function getURLPath(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getPath(); -} -/** - * Get URL scheme from an URL string. - * - * @param url - Source URL string - */ -function getURLScheme(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - return urlParsed.getScheme(); -} -/** - * Get URL path and query from an URL string. - * - * @param url - Source URL string - */ -function getURLPathAndQuery(url) { - const urlParsed = coreHttp.URLBuilder.parse(url); - const pathString = urlParsed.getPath(); - if (!pathString) { - throw new RangeError("Invalid url without valid path."); + /** + * operation marks the specified container for deletion. 
The container and any blobs contained within + * it are later deleted during garbage collection + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); } - let queryString = urlParsed.getQuery() || ""; - queryString = queryString.trim(); - if (queryString !== "") { - queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' + /** + * operation sets one or more user-defined name-value pairs for the specified container. + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); } - return `${pathString}${queryString}`; -} -/** - * Get URL query key value pairs from an URL string. - * - * @param url - - */ -function getURLQueries(url) { - let queryString = coreHttp.URLBuilder.parse(url).getQuery(); - if (!queryString) { - return {}; + /** + * gets the permissions for the specified container. The permissions indicate whether container data + * may be accessed publicly. + * @param options The options parameters. + */ + getAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); } - queryString = queryString.trim(); - queryString = queryString.startsWith("?") ? queryString.substr(1) : queryString; - let querySubStrings = queryString.split("&"); - querySubStrings = querySubStrings.filter((value) => { - const indexOfEqual = value.indexOf("="); - const lastIndexOfEqual = value.lastIndexOf("="); - return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); - }); - const queries = {}; - for (const querySubString of querySubStrings) { - const splitResults = querySubString.split("="); - const key = splitResults[0]; - const value = splitResults[1]; - queries[key] = value; + /** + * sets the permissions for the specified container. 
The permissions indicate whether blobs in a + * container may be accessed publicly. + * @param options The options parameters. + */ + setAccessPolicy(options) { + return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); + } + /** + * Restores a previously-deleted container. + * @param options The options parameters. + */ + restore(options) { + return this.client.sendOperationRequest({ options }, restoreOperationSpec); + } + /** + * Renames an existing container. + * @param sourceContainerName Required. Specifies the name of the container to rename. + * @param options The options parameters. + */ + rename(sourceContainerName, options) { + return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); + } + /** + * The Batch operation allows multiple API calls to be embedded into a single HTTP request. + * @param contentLength The length of the request. + * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch + * boundary. Example header value: multipart/mixed; boundary=batch_ + * @param body Initial data + * @param options The options parameters. + */ + submitBatch(contentLength, multipartContentType, body, options) { + return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); + } + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. 
+ */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); } - return queries; -} -/** - * Append a string to URL query. - * - * @param url - Source URL string. - * @param queryParts - String to be appended to the URL query. - * @returns An updated URL string. - */ -function appendToURLQuery(url, queryParts) { - const urlParsed = coreHttp.URLBuilder.parse(url); - let query = urlParsed.getQuery(); - if (query) { - query += "&" + queryParts; + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1); } - else { - query = queryParts; + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1); } - urlParsed.setQuery(query); - return urlParsed.toString(); -} -/** - * Rounds a date off to seconds. - * - * @param date - - * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; - * If false, YYYY-MM-DDThh:mm:ssZ will be returned. - * @returns Date string in ISO8061 format, with or without 7 milliseconds component - */ -function truncatedISO8061Date(date, withMilliseconds = true) { - // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" - const dateString = date.toISOString(); - return withMilliseconds - ? 
dateString.substring(0, dateString.length - 1) + "0000" + "Z" - : dateString.substring(0, dateString.length - 5) + "Z"; -} -/** - * Base64 encode. - * - * @param content - - */ -function base64encode(content) { - return !coreHttp.isNode ? btoa(content) : Buffer.from(content).toString("base64"); -} -/** - * Generate a 64 bytes base64 block ID string. - * - * @param blockIndex - - */ -function generateBlockID(blockIDPrefix, blockIndex) { - // To generate a 64 bytes base64 string, source string should be 48 - const maxSourceStringLength = 48; - // A blob can have a maximum of 100,000 uncommitted blocks at any given time - const maxBlockIndexLength = 6; - const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; - if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { - blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); } - const res = blockIDPrefix + - padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); - return base64encode(res); -} -/** - * Delay specified time interval. 
- * - * @param timeInMs - - * @param aborter - - * @param abortError - - */ -async function delay(timeInMs, aborter, abortError) { - return new Promise((resolve, reject) => { - /* eslint-disable-next-line prefer-const */ - let timeout; - const abortHandler = () => { - if (timeout !== undefined) { - clearTimeout(timeout); - } - reject(abortError); - }; - const resolveHandler = () => { - if (aborter !== undefined) { - aborter.removeEventListener("abort", abortHandler); - } - resolve(); - }; - timeout = setTimeout(resolveHandler, timeInMs); - if (aborter !== undefined) { - aborter.addEventListener("abort", abortHandler); - } - }); -} -/** - * String.prototype.padStart() - * - * @param currentString - - * @param targetLength - - * @param padString - - */ -function padStart(currentString, targetLength, padString = " ") { - // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes - if (String.prototype.padStart) { - return currentString.padStart(targetLength, padString); + /** + * [Update] establishes and manages a lock on a container for delete operations. The lock duration can + * be 15 to 60 seconds, or can be infinite + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. + */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1); } - padString = padString || " "; - if (currentString.length > targetLength) { - return currentString; + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param options The options parameters. 
+ */ + listBlobFlatSegment(options) { + return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } - else { - targetLength = targetLength - currentString.length; - if (targetLength > padString.length) { - padString += padString.repeat(targetLength / padString.length); - } - return padString.slice(0, targetLength) + currentString; + /** + * [Update] The List Blobs operation returns a list of the blobs under the specified container + * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix + * element in the response body that acts as a placeholder for all blobs whose names begin with the + * same substring up to the appearance of the delimiter character. The delimiter may be a single + * character or a string. + * @param options The options parameters. + */ + listBlobHierarchySegment(delimiter, options) { + return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); + } + /** + * Returns the sku name and account kind + * @param options The options parameters. + */ + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); } } -/** - * If two strings are equal when compared case insensitive. 
+// Operation Specifications +const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + access, + defaultEncryptionScope, + preventEncryptionScopeOverride, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getPropertiesOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const deleteOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: ContainerDeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerDeleteExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, restype2], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const setMetadataOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetMetadataHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetMetadataExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp6, + ], + urlParameters: [url], + headerParameters: [ + version, + 
requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { + name: "Sequence", + element: { + type: { name: "Composite", className: "SignedIdentifier" }, + }, + }, + serializedName: "SignedIdentifiers", + xmlName: "SignedIdentifiers", + xmlIsWrapped: true, + xmlElementName: "SignedIdentifier", + }, + headersMapper: ContainerGetAccessPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccessPolicyExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const setAccessPolicyOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerSetAccessPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSetAccessPolicyExceptionHeaders, + }, + }, + requestBody: containerAcl, + queryParameters: [ + timeoutInSeconds, + restype2, + comp7, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + access, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4, +}; +const restoreOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerRestoreHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRestoreExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp8, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + deletedContainerName, + deletedContainerVersion, + ], + isXML: 
true, + serializer: xmlSerializer$4, +}; +const renameOperationSpec = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenameHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenameExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp9, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + sourceContainerName, + sourceLeaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const submitBatchOperationSpec = { + path: "/{containerName}", + httpMethod: "POST", + responses: { + 202: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: ContainerSubmitBatchHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerSubmitBatchExceptionHeaders, + }, + }, + requestBody: body, + queryParameters: [ + timeoutInSeconds, + comp4, + restype2, + ], + urlParameters: [url], + headerParameters: [ + accept, + version, + requestId, + contentLength, + multipartContentType, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$4, +}; +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const acquireLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: ContainerAcquireLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: 
ContainerAcquireLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const releaseLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerReleaseLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerReleaseLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const renewLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerRenewLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerRenewLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const breakLeaseOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: ContainerBreakLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerBreakLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const changeLeaseOperationSpec$1 = { + path: 
"/{containerName}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: ContainerChangeLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerChangeLeaseExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + restype2, + comp10, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const listBlobFlatSegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsFlatSegmentResponse, + headersMapper: ContainerListBlobFlatSegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobFlatSegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const listBlobHierarchySegmentOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: ListBlobsHierarchySegmentResponse, + headersMapper: ContainerListBlobHierarchySegmentHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp2, + prefix, + marker, + maxPageSize, + restype2, + include1, + delimiter, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$4, +}; +const getAccountInfoOperationSpec$1 = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: ContainerGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerGetAccountInfoExceptionHeaders, + }, + }, + 
queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$4, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. * - * @param str1 - - * @param str2 - - */ -function iEqual(str1, str2) { - return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); -} -/** - * Extracts account name from the url - * @param url - url to extract the account name from - * @returns with the account name + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -function getAccountNameFromUrl(url) { - const parsedUrl = coreHttp.URLBuilder.parse(url); - let accountName; - try { - if (parsedUrl.getHost().split(".")[1] === "blob") { - // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; - accountName = parsedUrl.getHost().split(".")[0]; - } - else if (isIpEndpointStyle(parsedUrl)) { - // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ - // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ - // .getPath() -> /devstoreaccount1/ - accountName = parsedUrl.getPath().split("/")[1]; - } - else { - // Custom domain case: "https://customdomain.com/containername/blob". - accountName = ""; - } - return accountName; - } - catch (error) { - throw new Error("Unable to extract accountName with provided information."); - } -} -function isIpEndpointStyle(parsedUrl) { - if (parsedUrl.getHost() === undefined) { - return false; +/** Class containing Blob operations. */ +class BlobImpl { + /** + * Initialize a new instance of the class Blob class. + * @param client Reference to the service client + */ + constructor(client) { + this.client = client; } - const host = parsedUrl.getHost() + (parsedUrl.getPort() === undefined ? 
"" : ":" + parsedUrl.getPort()); - // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. - // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. - // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. - // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. - return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || - (parsedUrl.getPort() !== undefined && PathStylePorts.includes(parsedUrl.getPort()))); -} -/** - * Convert Tags to encoded string. - * - * @param tags - - */ -function toBlobTagsString(tags) { - if (tags === undefined) { - return undefined; + /** + * The Download operation reads or downloads a blob from the system, including its metadata and + * properties. You can also call Download to read a snapshot. + * @param options The options parameters. + */ + download(options) { + return this.client.sendOperationRequest({ options }, downloadOperationSpec); } - const tagPairs = []; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); - } + /** + * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system + * properties for the blob. It does not return the content of the blob. + * @param options The options parameters. + */ + getProperties(options) { + return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } - return tagPairs.join("&"); -} -/** - * Convert Tags type to BlobTags. - * - * @param tags - - */ -function toBlobTags(tags) { - if (tags === undefined) { - return undefined; + /** + * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is + * permanently removed from the storage account. 
If the storage account's soft delete feature is + * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible + * immediately. However, the blob service retains the blob or snapshot for the number of days specified + * by the DeleteRetentionPolicy section of [Storage service properties] + * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is + * permanently removed from the storage account. Note that you continue to be charged for the + * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the + * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You + * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a + * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 + * (ResourceNotFound). + * @param options The options parameters. + */ + delete(options) { + return this.client.sendOperationRequest({ options }, deleteOperationSpec); } - const res = { - blobTagSet: [], - }; - for (const key in tags) { - if (Object.prototype.hasOwnProperty.call(tags, key)) { - const value = tags[key]; - res.blobTagSet.push({ - key, - value, - }); - } + /** + * Undelete a blob that was previously soft deleted + * @param options The options parameters. + */ + undelete(options) { + return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } - return res; -} -/** - * Covert BlobTags to Tags type. - * - * @param tags - - */ -function toTags(tags) { - if (tags === undefined) { - return undefined; + /** + * Sets the time a blob will expire and be deleted. + * @param expiryOptions Required. Indicates mode of the expiry time + * @param options The options parameters. 
+ */ + setExpiry(expiryOptions, options) { + return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } - const res = {}; - for (const blobTag of tags.blobTagSet) { - res[blobTag.key] = blobTag.value; + /** + * The Set HTTP Headers operation sets system properties on the blob + * @param options The options parameters. + */ + setHttpHeaders(options) { + return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } - return res; -} -/** - * Convert BlobQueryTextConfiguration to QuerySerialization type. - * - * @param textConfiguration - - */ -function toQuerySerialization(textConfiguration) { - if (textConfiguration === undefined) { - return undefined; + /** + * The Set Immutability Policy operation sets the immutability policy on the blob + * @param options The options parameters. + */ + setImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } - switch (textConfiguration.kind) { - case "csv": - return { - format: { - type: "delimited", - delimitedTextConfiguration: { - columnSeparator: textConfiguration.columnSeparator || ",", - fieldQuote: textConfiguration.fieldQuote || "", - recordSeparator: textConfiguration.recordSeparator, - escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false, - }, - }, - }; - case "json": - return { - format: { - type: "json", - jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator, - }, - }, - }; - case "arrow": - return { - format: { - type: "arrow", - arrowConfiguration: { - schema: textConfiguration.schema, - }, - }, - }; - case "parquet": - return { - format: { - type: "parquet", - }, - }; - default: - throw Error("Invalid BlobQueryTextConfiguration."); + /** + * The Delete Immutability Policy operation deletes the immutability policy on the blob + * @param options The options parameters. 
+ */ + deleteImmutabilityPolicy(options) { + return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } -} -function parseObjectReplicationRecord(objectReplicationRecord) { - if (!objectReplicationRecord) { - return undefined; + /** + * The Set Legal Hold operation sets a legal hold on the blob. + * @param legalHold Specified if a legal hold should be set on the blob. + * @param options The options parameters. + */ + setLegalHold(legalHold, options) { + return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } - if ("policy-id" in objectReplicationRecord) { - // If the dictionary contains a key with policy id, we are not required to do any parsing since - // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. - return undefined; + /** + * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more + * name-value pairs + * @param options The options parameters. + */ + setMetadata(options) { + return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } - const orProperties = []; - for (const key in objectReplicationRecord) { - const ids = key.split("_"); - const policyPrefix = "or-"; - if (ids[0].startsWith(policyPrefix)) { - ids[0] = ids[0].substring(policyPrefix.length); - } - const rule = { - ruleId: ids[1], - replicationStatus: objectReplicationRecord[key], - }; - const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); - if (policyIndex > -1) { - orProperties[policyIndex].rules.push(rule); - } - else { - orProperties.push({ - policyId: ids[0], - rules: [rule], - }); - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. 
+ */ + acquireLease(options) { + return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } - return orProperties; -} -/** - * Attach a TokenCredential to an object. - * - * @param thing - - * @param credential - - */ -function attachCredential(thing, credential) { - thing.credential = credential; - return thing; -} -function httpAuthorizationToString(httpAuthorization) { - return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; -} -function BlobNameToString(name) { - if (name.encoded) { - return decodeURIComponent(name.content); + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + releaseLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } - else { - return name.content; + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param options The options parameters. + */ + renewLease(leaseId, options) { + return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } -} -function ConvertInternalResponseOfListBlobFlat(internalResponse) { - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }), - } }); -} -function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { - var _a; - return Object.assign(Object.assign({}, internalResponse), { segment: { - blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { - const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); - return blobPrefix; - }), - blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); - return blobItem; - }), - } }); -} -function* ExtractPageRangeInfoItems(getPageRangesSegment) { - let pageRange = []; - let clearRange = []; - if (getPageRangesSegment.pageRange) - pageRange = getPageRangesSegment.pageRange; - if (getPageRangesSegment.clearRange) - clearRange = getPageRangesSegment.clearRange; - let pageRangeIndex = 0; - let clearRangeIndex = 0; - while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { - if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false, - }; - ++pageRangeIndex; - } - else { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true, - }; - ++clearRangeIndex; - } + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param leaseId Specifies the current lease ID on the resource. + * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 + * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor + * (String) for a list of valid GUID string formats. + * @param options The options parameters. 
+ */ + changeLease(leaseId, proposedLeaseId, options) { + return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } - for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { - yield { - start: pageRange[pageRangeIndex].start, - end: pageRange[pageRangeIndex].end, - isClear: false, - }; + /** + * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete + * operations + * @param options The options parameters. + */ + breakLease(options) { + return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } - for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { - yield { - start: clearRange[clearRangeIndex].start, - end: clearRange[clearRangeIndex].end, - isClear: true, - }; + /** + * The Create Snapshot operation creates a read-only snapshot of a blob + * @param options The options parameters. + */ + createSnapshot(options) { + return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } -} -/** - * Escape the blobName but keep path separator ('/'). - */ -function EscapePath(blobName) { - const split = blobName.split("/"); - for (let i = 0; i < split.length; i++) { - split[i] = encodeURIComponent(split[i]); + /** + * The Start Copy From URL operation copies a blob or an internet resource to a new blob. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + startCopyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); + } + /** + * The Copy From URL operation copies a blob or an internet resource to a new blob. 
It will not return + * a response until the copy is complete. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. + */ + copyFromURL(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); + } + /** + * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination + * blob with zero length and full metadata. + * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob + * operation. + * @param options The options parameters. + */ + abortCopyFromURL(copyId, options) { + return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } - return split.join("/"); -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: - * - * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. - * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL - * thus avoid the browser cache. - * - * 2. Remove cookie header for security - * - * 3. Remove content-length header to avoid browsers warning - */ -class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { /** - * Creates an instance of StorageBrowserPolicy. - * @param nextPolicy - - * @param options - + * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium + * storage account and on a block blob in a blob storage account (locally redundant storage only). 
A + * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block + * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's + * ETag. + * @param tier Indicates the tier to be set on the blob. + * @param options The options parameters. */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + setTier(tier, options) { + return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); } /** - * Sends out request. - * - * @param request - + * Returns the sku name and account kind + * @param options The options parameters. */ - async sendRequest(request) { - if (coreHttp.isNode) { - return this._nextPolicy.sendRequest(request); - } - if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { - request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); - } - request.headers.remove(HeaderConstants.COOKIE); - // According to XHR standards, content-length should be fully controlled by browsers - request.headers.remove(HeaderConstants.CONTENT_LENGTH); - return this._nextPolicy.sendRequest(request); + getAccountInfo(options) { + return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. - */ -class StorageBrowserPolicyFactory { /** - * Creates a StorageBrowserPolicyFactory object. - * - * @param nextPolicy - - * @param options - + * The Query operation enables users to select/project on blob data by providing simple query + * expressions. + * @param options The options parameters. 
*/ - create(nextPolicy, options) { - return new StorageBrowserPolicy(nextPolicy, options); + query(options) { + return this.client.sendOperationRequest({ options }, queryOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -/** - * RetryPolicy types. - */ -exports.StorageRetryPolicyType = void 0; -(function (StorageRetryPolicyType) { /** - * Exponential retry. Retry time delay grows exponentially. + * The Get Tags operation enables users to get the tags associated with a blob. + * @param options The options parameters. */ - StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; + getTags(options) { + return this.client.sendOperationRequest({ options }, getTagsOperationSpec); + } /** - * Linear retry. Retry time delay grows linearly. + * The Set Tags operation enables users to set tags on a blob. + * @param options The options parameters. */ - StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; -})(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); -// Default values of StorageRetryOptions -const DEFAULT_RETRY_OPTIONS = { - maxRetryDelayInMs: 120 * 1000, - maxTries: 4, - retryDelayInMs: 4 * 1000, - retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, - secondaryHost: "", - tryTimeoutInMs: undefined, // Use server side default timeout strategy + setTags(options) { + return this.client.sendOperationRequest({ options }, setTagsOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const downloadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobDownloadHeaders, + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobDownloadHeaders, + }, + default: { + bodyMapper: StorageError, + 
headersMapper: BlobDownloadExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + rangeGetContentMD5, + rangeGetContentCRC64, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const getPropertiesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "HEAD", + responses: { + 200: { + headersMapper: BlobGetPropertiesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetPropertiesExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const deleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 202: { + headersMapper: BlobDeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + blobDeleteType, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + deleteSnapshots, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const undeleteOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobUndeleteHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobUndeleteExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp8], + urlParameters: [url], 
+ headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setExpiryOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetExpiryHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetExpiryExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp11], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + expiryOptions, + expiresOn, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setHttpHeadersOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetHttpHeadersHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetHttpHeadersExceptionHeaders, + }, + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetImmutabilityPolicyHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetImmutabilityPolicyExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifUnmodifiedSince, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const deleteImmutabilityPolicyOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "DELETE", + responses: { + 200: { + headersMapper: BlobDeleteImmutabilityPolicyHeaders, + }, + 
default: { + bodyMapper: StorageError, + headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp12], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setLegalHoldOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetLegalHoldHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetLegalHoldExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp13], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + legalHold, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setMetadataOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetMetadataHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetMetadataExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp6], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const acquireLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobAcquireLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAcquireLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action, + duration, + proposedLeaseId, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const releaseLeaseOperationSpec = { + path: 
"/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobReleaseLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobReleaseLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + action1, + leaseId1, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const renewLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobRenewLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobRenewLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action2, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const changeLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobChangeLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobChangeLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + leaseId1, + action4, + proposedLeaseId1, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const breakLeaseOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobBreakLeaseHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobBreakLeaseExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp10], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + 
ifModifiedSince, + ifUnmodifiedSince, + action3, + breakPeriod, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const createSnapshotOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlobCreateSnapshotHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCreateSnapshotExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp14], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const startCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobStartCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobStartCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + tier, + rehydratePriority, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sealBlob, + legalHold1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const copyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: BlobCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + metadata, + leaseId, + 
ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + copySource, + blobTagsString, + legalHold1, + xMsRequiresSync, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const abortCopyFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobAbortCopyFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobAbortCopyFromURLExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp15, + copyId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + copyActionAbortConstant, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setTierOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: BlobSetTierHeaders, + }, + 202: { + headersMapper: BlobSetTierHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTierExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp16, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + rehydratePriority, + tier1, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const getAccountInfoOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + headersMapper: BlobGetAccountInfoHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetAccountInfoExceptionHeaders, + }, + }, + queryParameters: [comp, restype1], + urlParameters: [url], + headerParameters: [version, accept1], + isXML: true, + serializer: xmlSerializer$3, +}; +const queryOperationSpec = { + path: 
"/{containerName}/{blob}", + httpMethod: "POST", + responses: { + 200: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobQueryHeaders, + }, + 206: { + bodyMapper: { + type: { name: "Stream" }, + serializedName: "parsedResponse", + }, + headersMapper: BlobQueryHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobQueryExceptionHeaders, + }, + }, + requestBody: queryRequest, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp17, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer$3, +}; +const getTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlobTags, + headersMapper: BlobGetTagsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobGetTagsExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + versionId, + comp18, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$3, +}; +const setTagsOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 204: { + headersMapper: BlobSetTagsHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlobSetTagsExceptionHeaders, + }, + }, + requestBody: tags, + queryParameters: [ + timeoutInSeconds, + versionId, + comp18, + ], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + leaseId, + ifTags, + transactionalContentMD5, + transactionalContentCrc64, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + 
mediaType: "xml", + serializer: xmlSerializer$3, }; -const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); -/** - * Retry policy with exponential retry and linear retry implemented. - */ -class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { - /** - * Creates an instance of RetryPolicy. - * - * @param nextPolicy - - * @param options - - * @param retryOptions - - */ - constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS) { - super(nextPolicy, options); - // Initialize retry options - this.retryOptions = { - retryPolicyType: retryOptions.retryPolicyType - ? retryOptions.retryPolicyType - : DEFAULT_RETRY_OPTIONS.retryPolicyType, - maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 - ? Math.floor(retryOptions.maxTries) - : DEFAULT_RETRY_OPTIONS.maxTries, - tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 - ? retryOptions.tryTimeoutInMs - : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs, - retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 - ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs - ? retryOptions.maxRetryDelayInMs - : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs) - : DEFAULT_RETRY_OPTIONS.retryDelayInMs, - maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 - ? retryOptions.maxRetryDelayInMs - : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, - secondaryHost: retryOptions.secondaryHost - ? retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost, - }; - } - /** - * Sends request. - * - * @param request - - */ - async sendRequest(request) { - return this.attemptSendRequest(request, false, 1); - } - /** - * Decide and perform next retry. Won't mutate request parameter. - * - * @param request - - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then - * the resource was not found. This may be due to replication delay. 
So, in this - * case, we'll never try the secondary again for this operation. - * @param attempt - How many retries has been attempted to performed, starting from 1, which includes - * the attempt will be performed by this method call. - */ - async attemptSendRequest(request, secondaryHas404, attempt) { - const newRequest = request.clone(); - const isPrimaryRetry = secondaryHas404 || - !this.retryOptions.secondaryHost || - !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || - attempt % 2 === 1; - if (!isPrimaryRetry) { - newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); - } - // Set the server-side timeout query parameter "timeout=[seconds]" - if (this.retryOptions.tryTimeoutInMs) { - newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); - } - let response; - try { - logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); - response = await this._nextPolicy.sendRequest(newRequest); - if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { - return response; - } - secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); - } - catch (err) { - logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); - if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { - throw err; - } - } - await this.delay(isPrimaryRetry, attempt, request.abortSignal); - return this.attemptSendRequest(request, secondaryHas404, ++attempt); - } - /** - * Decide whether to retry according to last HTTP response and retry counters. 
- * - * @param isPrimaryRetry - - * @param attempt - - * @param response - - * @param err - - */ - shouldRetry(isPrimaryRetry, attempt, response, err) { - if (attempt >= this.retryOptions.maxTries) { - logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions - .maxTries}, no further try.`); - return false; - } - // Handle network failures, you may need to customize the list when you implement - // your own http client - const retriableErrors = [ - "ETIMEDOUT", - "ESOCKETTIMEDOUT", - "ECONNREFUSED", - "ECONNRESET", - "ENOENT", - "ENOTFOUND", - "TIMEOUT", - "EPIPE", - "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js - ]; - if (err) { - for (const retriableError of retriableErrors) { - if (err.name.toUpperCase().includes(retriableError) || - err.message.toUpperCase().includes(retriableError) || - (err.code && err.code.toString().toUpperCase() === retriableError)) { - logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); - return true; - } - } - } - // If attempt was against the secondary & it returned a StatusNotFound (404), then - // the resource was not found. This may be due to replication delay. So, in this - // case, we'll never try the secondary again for this operation. - if (response || err) { - const statusCode = response ? response.status : err ? err.statusCode : 0; - if (!isPrimaryRetry && statusCode === 404) { - logger.info(`RetryPolicy: Secondary access with 404, will retry.`); - return true; - } - // Server internal error or server timeout - if (statusCode === 503 || statusCode === 500) { - logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); - return true; - } - } - if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? 
void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { - logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); - return true; - } - return false; - } - /** - * Delay a calculated time between retries. - * - * @param isPrimaryRetry - - * @param attempt - - * @param abortSignal - - */ - async delay(isPrimaryRetry, attempt, abortSignal) { - let delayTimeInMs = 0; - if (isPrimaryRetry) { - switch (this.retryOptions.retryPolicyType) { - case exports.StorageRetryPolicyType.EXPONENTIAL: - delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); - break; - case exports.StorageRetryPolicyType.FIXED: - delayTimeInMs = this.retryOptions.retryDelayInMs; - break; - } - } - else { - delayTimeInMs = Math.random() * 1000; - } - logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); - return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR); - } -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. - */ -class StorageRetryPolicyFactory { - /** - * Creates an instance of StorageRetryPolicyFactory. - * @param retryOptions - - */ - constructor(retryOptions) { - this.retryOptions = retryOptions; - } - /** - * Creates a StorageRetryPolicy object. - * - * @param nextPolicy - - * @param options - - */ - create(nextPolicy, options) { - return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); - } -} -// Copyright (c) Microsoft Corporation. -/** - * Credential policy used to sign HTTP(S) requests before sending. This is an - * abstract class. +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
*/ -class CredentialPolicy extends coreHttp.BaseRequestPolicy { +/** Class containing PageBlob operations. */ +class PageBlobImpl { /** - * Sends out request. - * - * @param request - + * Initialize a new instance of the class PageBlob class. + * @param client Reference to the service client */ - sendRequest(request) { - return this._nextPolicy.sendRequest(this.signRequest(request)); + constructor(client) { + this.client = client; } /** - * Child classes must implement this method with request signing. This method - * will be executed in {@link sendRequest}. - * - * @param request - + * The Create operation creates a new page blob. + * @param contentLength The length of the request. + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. */ - signRequest(request) { - // Child classes must override this method with request signing. This method - // will be executed in sendRequest(). - return request; + create(contentLength, blobContentLength, options) { + return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1); } -} - -// Copyright (c) Microsoft Corporation. -/** - * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources - * or for use with Shared Access Signatures (SAS). - */ -class AnonymousCredentialPolicy extends CredentialPolicy { /** - * Creates an instance of AnonymousCredentialPolicy. - * @param nextPolicy - - * @param options - + * The Upload Pages operation writes a range of pages to a page blob + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. */ - // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
- /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); + uploadPages(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Credential is an abstract class for Azure Storage HTTP requests signing. This - * class will host an credentialPolicyCreator factory which generates CredentialPolicy. - */ -class Credential { /** - * Creates a RequestPolicy object. - * - * @param _nextPolicy - - * @param _options - + * The Clear Pages operation clears a set of pages from a page blob + * @param contentLength The length of the request. + * @param options The options parameters. */ - create(_nextPolicy, _options) { - throw new Error("Method should be implemented in children classes."); + clearPages(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -/** - * AnonymousCredential provides a credentialPolicyCreator member used to create - * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with - * HTTP(S) requests that read public resources or for use with Shared Access - * Signatures (SAS). - */ -class AnonymousCredential extends Credential { /** - * Creates an {@link AnonymousCredentialPolicy} object. - * - * @param nextPolicy - - * @param options - + * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a + * URL + * @param sourceUrl Specify a URL to the copy source. + * @param sourceRange Bytes of source data in the specified range. The length of this range should + * match the ContentLength header and x-ms-range/Range destination range header. + * @param contentLength The length of the request. 
+ * @param range The range of bytes to which the source range would be written. The range should be 512 + * aligned and range-end is required. + * @param options The options parameters. */ - create(nextPolicy, options) { - return new AnonymousCredentialPolicy(nextPolicy, options); + uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { + return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -/** - * TelemetryPolicy is a policy used to tag user-agent header for every requests. - */ -class TelemetryPolicy extends coreHttp.BaseRequestPolicy { /** - * Creates an instance of TelemetryPolicy. - * @param nextPolicy - - * @param options - - * @param telemetry - + * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a + * page blob + * @param options The options parameters. */ - constructor(nextPolicy, options, telemetry) { - super(nextPolicy, options); - this.telemetry = telemetry; + getPageRanges(options) { + return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** - * Sends out request. - * - * @param request - + * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were + * changed between target blob and previous snapshot. + * @param options The options parameters. */ - async sendRequest(request) { - if (coreHttp.isNode) { - if (!request.headers) { - request.headers = new coreHttp.HttpHeaders(); - } - if (!request.headers.get(HeaderConstants.USER_AGENT)) { - request.headers.set(HeaderConstants.USER_AGENT, this.telemetry); - } - } - return this._nextPolicy.sendRequest(request); + getPageRangesDiff(options) { + return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. 
-/** - * TelemetryPolicyFactory is a factory class helping generating {@link TelemetryPolicy} objects. - */ -class TelemetryPolicyFactory { - /** - * Creates an instance of TelemetryPolicyFactory. - * @param telemetry - - */ - constructor(telemetry) { - const userAgentInfo = []; - if (coreHttp.isNode) { - if (telemetry) { - const telemetryString = telemetry.userAgentPrefix || ""; - if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { - userAgentInfo.push(telemetryString); - } - } - // e.g. azsdk-js-storageblob/10.0.0 - const libInfo = `azsdk-js-storageblob/${SDK_VERSION}`; - if (userAgentInfo.indexOf(libInfo) === -1) { - userAgentInfo.push(libInfo); - } - // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - let runtimeInfo = `(NODE-VERSION ${process.version})`; - if (os__namespace) { - runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; - } - if (userAgentInfo.indexOf(runtimeInfo) === -1) { - userAgentInfo.push(runtimeInfo); - } - } - this.telemetryString = userAgentInfo.join(" "); + /** + * Resize the Blob + * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The + * page blob size must be aligned to a 512-byte boundary. + * @param options The options parameters. + */ + resize(blobContentLength, options) { + return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** - * Creates a TelemetryPolicy object. - * - * @param nextPolicy - - * @param options - + * Update the sequence number of the blob + * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. + * This property applies to page blobs only. This property indicates how the service should modify the + * blob's sequence number + * @param options The options parameters. 
*/ - create(nextPolicy, options) { - return new TelemetryPolicy(nextPolicy, options, this.telemetryString); + updateSequenceNumber(sequenceNumberAction, options) { + return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -const _defaultHttpClient = new coreHttp.DefaultHttpClient(); -function getCachedDefaultHttpClient() { - return _defaultHttpClient; -} - -// Copyright (c) Microsoft Corporation. -/** - * A set of constants used internally when processing requests. - */ -const Constants = { - DefaultScope: "/.default", /** - * Defines constants for use with HTTP headers. + * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. + * The snapshot is copied such that only the differential changes between the previously copied + * snapshot are transferred to the destination. The copied snapshots are complete copies of the + * original snapshot and can be read or copied from as usual. This API is supported since REST version + * 2016-05-31. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. */ - HeaderConstants: { - /** - * The Authorization header. 
- */ - AUTHORIZATION: "authorization", + copyIncremental(copySource, options) { + return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); + } +} +// Operation Specifications +const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec$1 = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + blobType, + blobContentLength, + blobSequenceNumber, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + 
ifSequenceNumberEqualTo, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$2, +}; +const clearPagesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobClearPagesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobClearPagesExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + pageWrite1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const uploadPagesFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: PageBlobUploadPagesFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUploadPagesFromURLExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp19], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + pageWrite, + ifSequenceNumberLessThanOrEqualTo, + ifSequenceNumberLessThan, + ifSequenceNumberEqualTo, + sourceUrl, + sourceRange, + sourceContentCrc64, + range1, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const getPageRangesOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: 
PageList, + headersMapper: PageBlobGetPageRangesHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesExceptionHeaders, + }, }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + ], + isXML: true, + serializer: xmlSerializer$2, }; -// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +const getPageRangesDiffOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: PageList, + headersMapper: PageBlobGetPageRangesDiffHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobGetPageRangesDiffExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + snapshot, + comp20, + prevsnapshot, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + range, + ifMatch, + ifNoneMatch, + ifTags, + prevSnapshotUrl, + ], + isXML: true, + serializer: xmlSerializer$2, }; -/** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. 
- * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token - */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await coreHttp.delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. 
- * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; +const resizeOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobResizeHeaders, }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + default: { + bodyMapper: StorageError, + headersMapper: PageBlobResizeExceptionHeaders, }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). 
- */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + blobContentLength, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const updateSequenceNumberOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: PageBlobUpdateSequenceNumberHeaders, }, - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. 
- // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -/** - * We will retrieve the challenge only if the response status code was 401, - * and if the response contained the header "WWW-Authenticate" with a non-empty value. - */ -function getChallenge(response) { - const challenge = response.headers.get("WWW-Authenticate"); - if (response.status === 401 && challenge) { - return challenge; - } - return; -} -/** - * Converts: `Bearer a="b" c="d"`. - * Into: `[ { a: 'b', c: 'd' }]`. - * - * @internal - */ -function parseChallenge(challenge) { - const bearerChallenge = challenge.slice("Bearer ".length); - const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); - const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); - // Key-value pairs to plain object: - return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. 
- */ -function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - let getToken = createTokenCycler(credential, scopes); - class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const getTokenInternal = getToken; - const token = (await getTokenInternal({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - })).token; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - const response = await this._nextPolicy.sendRequest(webResource); - if ((response === null || response === void 0 ? 
void 0 : response.status) === 401) { - const challenge = getChallenge(response); - if (challenge) { - const challengeInfo = parseChallenge(challenge); - const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; - const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); - const pathSegments = parsedAuthUri.getPath().split("/"); - const tenantId = pathSegments[1]; - const getTokenForChallenge = createTokenCycler(credential, challengeScopes); - const tokenForChallenge = (await getTokenForChallenge({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext, - }, - tenantId: tenantId, - })).token; - getToken = getTokenForChallenge; - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); - return this._nextPolicy.sendRequest(webResource); - } - } - return response; - } - } - return { - create: (nextPolicy, options) => { - return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + default: { + bodyMapper: StorageError, + headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders, }, - }; -} + }, + queryParameters: [comp, timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + blobSequenceNumber, + sequenceNumberAction, + ], + isXML: true, + serializer: xmlSerializer$2, +}; +const copyIncrementalOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 202: { + headersMapper: PageBlobCopyIncrementalHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: PageBlobCopyIncrementalExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp21], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + ifTags, + copySource, + ], + isXML: true, + 
serializer: xmlSerializer$2, +}; -// Copyright (c) Microsoft Corporation. -/** - * A helper to decide if a given argument satisfies the Pipeline contract - * @param pipeline - An argument that may be a Pipeline - * @returns true when the argument satisfies the Pipeline contract - */ -function isPipelineLike(pipeline) { - if (!pipeline || typeof pipeline !== "object") { - return false; - } - const castPipeline = pipeline; - return (Array.isArray(castPipeline.factories) && - typeof castPipeline.options === "object" && - typeof castPipeline.toServiceClientOptions === "function"); -} -/** - * A Pipeline class containing HTTP request policies. - * You can create a default Pipeline by calling {@link newPipeline}. - * Or you can create a Pipeline with your own policies by the constructor of Pipeline. +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. * - * Refer to {@link newPipeline} and provided policies before implementing your - * customized Pipeline. + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -class Pipeline { +/** Class containing AppendBlob operations. */ +class AppendBlobImpl { /** - * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. - * - * @param factories - - * @param options - + * Initialize a new instance of the class AppendBlob class. + * @param client Reference to the service client */ - constructor(factories, options = {}) { - this.factories = factories; - // when options.httpClient is not specified, passing in a DefaultHttpClient instance to - // avoid each client creating its own http client. 
- this.options = Object.assign(Object.assign({}, options), { httpClient: options.httpClient || getCachedDefaultHttpClient() }); + constructor(client) { + this.client = client; } /** - * Transfer Pipeline object to ServiceClientOptions object which is required by - * ServiceClient constructor. - * - * @returns The ServiceClientOptions object from this Pipeline. + * The Create Append Blob operation creates a new append blob. + * @param contentLength The length of the request. + * @param options The options parameters. */ - toServiceClientOptions() { - return { - httpClient: this.options.httpClient, - requestPolicyFactories: this.factories, - }; + create(contentLength, options) { + return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } -} -/** - * Creates a new Pipeline object with Credential provided. - * - * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. - * @param pipelineOptions - Optional. Options. - * @returns A new Pipeline object. - */ -function newPipeline(credential, pipelineOptions = {}) { - var _a; - if (credential === undefined) { - credential = new AnonymousCredential(); + /** + * The Append Block operation commits a new block of data to the end of an existing append blob. The + * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to + * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. + */ + appendBlock(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } - // Order is important. 
Closer to the API at the top & closer to the network at the bottom. - // The credential's policy factory must appear close to the wire so it can sign any - // changes made by other factories (like UniqueRequestIDPolicyFactory) - const telemetryPolicy = new TelemetryPolicyFactory(pipelineOptions.userAgentOptions); - const factories = [ - coreHttp.tracingPolicy({ userAgent: telemetryPolicy.telemetryString }), - coreHttp.keepAlivePolicy(pipelineOptions.keepAliveOptions), - telemetryPolicy, - coreHttp.generateClientRequestIdPolicy(), - new StorageBrowserPolicyFactory(), - new StorageRetryPolicyFactory(pipelineOptions.retryOptions), - // Default deserializationPolicy is provided by protocol layer - // Use customized XML char key of "#" so we could deserialize metadata - // with "_" key - coreHttp.deserializationPolicy(undefined, { xmlCharKey: "#" }), - coreHttp.logPolicy({ - logger: logger.info, - allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, - }), - ]; - if (coreHttp.isNode) { - // policies only available in Node.js runtime, not in browsers - factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); - factories.push(coreHttp.disableResponseDecompressionPolicy()); - } - factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes), credential) - : credential); - return new Pipeline(factories, pipelineOptions); -} - -// Copyright (c) Microsoft Corporation. -/** - * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. - */ -class StorageSharedKeyCredentialPolicy extends CredentialPolicy { /** - * Creates an instance of StorageSharedKeyCredentialPolicy. 
- * @param nextPolicy - - * @param options - - * @param factory - + * The Append Block operation commits a new block of data to the end of an existing append blob where + * the contents are read from a source url. The Append Block operation is permitted only if the blob + * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version + * 2015-02-21 version or later. + * @param sourceUrl Specify a URL to the copy source. + * @param contentLength The length of the request. + * @param options The options parameters. */ - constructor(nextPolicy, options, factory) { - super(nextPolicy, options); - this.factory = factory; + appendBlockFromUrl(sourceUrl, contentLength, options) { + return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } /** - * Signs request. - * - * @param request - + * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version + * 2019-12-12 version or later. + * @param options The options parameters. 
*/ - signRequest(request) { - request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && - (typeof request.body === "string" || request.body !== undefined) && - request.body.length > 0) { - request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); - } - const stringToSign = [ - request.method.toUpperCase(), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), - this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), - this.getHeaderValueToSign(request, HeaderConstants.DATE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), - this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE), - ].join("\n") + - "\n" + - this.getCanonicalizedHeadersString(request) + - this.getCanonicalizedResourceString(request); - const signature = this.factory.computeHMACSHA256(stringToSign); - request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); - // console.log(`[URL]:${request.url}`); - // console.log(`[HEADERS]:${request.headers.toString()}`); - // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); - // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); - return request; + seal(options) { + return this.client.sendOperationRequest({ options }, sealOperationSpec); } +} +// Operation Specifications +const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const createOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", 
+ responses: { + 201: { + headersMapper: AppendBlobCreateHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobCreateExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + blobTagsString, + legalHold1, + blobType1, + ], + isXML: true, + serializer: xmlSerializer$1, +}; +const appendBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + maxSize, + appendPosition, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer$1, +}; +const appendBlockFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: AppendBlobAppendBlockFromUrlHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp22], + urlParameters: [url], + headerParameters: 
[ + version, + requestId, + accept1, + contentLength, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + transactionalContentMD5, + sourceUrl, + sourceContentCrc64, + maxSize, + appendPosition, + sourceRange1, + ], + isXML: true, + serializer: xmlSerializer$1, +}; +const sealOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 200: { + headersMapper: AppendBlobSealHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: AppendBlobSealExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds, comp23], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + ifMatch, + ifNoneMatch, + appendPosition, + ], + isXML: true, + serializer: xmlSerializer$1, +}; + +/* + * Copyright (c) Microsoft Corporation. + * Licensed under the MIT License. + * + * Code generated by Microsoft (R) AutoRest Code Generator. + * Changes may cause incorrect behavior and will be lost if the code is regenerated. + */ +/** Class containing BlockBlob operations. */ +class BlockBlobImpl { /** - * Retrieve header value according to shared key sign rules. - * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key - * - * @param request - - * @param headerName - + * Initialize a new instance of the class BlockBlob class. + * @param client Reference to the service client */ - getHeaderValueToSign(request, headerName) { - const value = request.headers.get(headerName); - if (!value) { - return ""; - } - // When using version 2015-02-21 or later, if Content-Length is zero, then - // set the Content-Length part of the StringToSign to an empty string. 
- // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key - if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { - return ""; - } - return value; + constructor(client) { + this.client = client; } /** - * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: - * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. - * 2. Convert each HTTP header name to lowercase. - * 3. Sort the headers lexicographically by header name, in ascending order. - * Each header may appear only once in the string. - * 4. Replace any linear whitespace in the header value with a single space. - * 5. Trim any whitespace around the colon in the header. - * 6. Finally, append a new-line character to each canonicalized header in the resulting list. - * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. - * - * @param request - + * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing + * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put + * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a + * partial update of the content of a block blob, use the Put Block List operation. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. 
*/ - getCanonicalizedHeadersString(request) { - let headersArray = request.headers.headersArray().filter((value) => { - return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); - }); - headersArray.sort((a, b) => { - return a.name.toLowerCase().localeCompare(b.name.toLowerCase()); - }); - // Remove duplicate headers - headersArray = headersArray.filter((value, index, array) => { - if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { - return false; - } - return true; - }); - let canonicalizedHeadersStringToSign = ""; - headersArray.forEach((header) => { - canonicalizedHeadersStringToSign += `${header.name - .toLowerCase() - .trimRight()}:${header.value.trimLeft()}\n`; - }); - return canonicalizedHeadersStringToSign; + upload(contentLength, body, options) { + return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** - * Retrieves the webResource canonicalized resource string. - * - * @param request - + * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read + * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are + * not supported with Put Blob from URL; the content of an existing blob is overwritten with the + * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, + * use the Put Block from URL API in conjunction with Put Block List. + * @param contentLength The length of the request. + * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to + * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would + * appear in a request URI. The source blob must either be public or must be authenticated via a shared + * access signature. + * @param options The options parameters. 
*/ - getCanonicalizedResourceString(request) { - const path = getURLPath(request.url) || "/"; - let canonicalizedResourceString = ""; - canonicalizedResourceString += `/${this.factory.accountName}${path}`; - const queries = getURLQueries(request.url); - const lowercaseQueries = {}; - if (queries) { - const queryKeys = []; - for (const key in queries) { - if (Object.prototype.hasOwnProperty.call(queries, key)) { - const lowercaseKey = key.toLowerCase(); - lowercaseQueries[lowercaseKey] = queries[key]; - queryKeys.push(lowercaseKey); - } - } - queryKeys.sort(); - for (const key of queryKeys) { - canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; - } - } - return canonicalizedResourceString; + putBlobFromUrl(contentLength, copySource, options) { + return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } -} - -// Copyright (c) Microsoft Corporation. -/** - * ONLY AVAILABLE IN NODE.JS RUNTIME. - * - * StorageSharedKeyCredential for account key authorization of Azure Storage service. - */ -class StorageSharedKeyCredential extends Credential { /** - * Creates an instance of StorageSharedKeyCredential. - * @param accountName - - * @param accountKey - + * The Stage Block operation creates a new block to be committed as part of a blob + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param body Initial data + * @param options The options parameters. 
*/ - constructor(accountName, accountKey) { - super(); - this.accountName = accountName; - this.accountKey = Buffer.from(accountKey, "base64"); + stageBlock(blockId, contentLength, body, options) { + return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** - * Creates a StorageSharedKeyCredentialPolicy object. - * - * @param nextPolicy - - * @param options - + * The Stage Block operation creates a new block to be committed as part of a blob where the contents + * are read from a URL. + * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string + * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified + * for the blockid parameter must be the same size for each block. + * @param contentLength The length of the request. + * @param sourceUrl Specify a URL to the copy source. + * @param options The options parameters. */ - create(nextPolicy, options) { - return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); + stageBlockFromURL(blockId, contentLength, sourceUrl, options) { + return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** - * Generates a hash signature for an HTTP request or for a SAS. - * - * @param stringToSign - + * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the + * blob. In order to be written as part of a blob, a block must have been successfully written to the + * server in a prior Put Block operation. You can call Put Block List to update a blob by uploading + * only those blocks that have changed, then committing the new and existing blocks together. 
You can + * do this by specifying whether to commit a block from the committed block list or from the + * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list + * it may belong to. + * @param blocks Blob Blocks. + * @param options The options parameters. */ - computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); + commitBlockList(blocks, options) { + return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); + } + /** + * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block + * blob + * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted + * blocks, or both lists together. + * @param options The options parameters. + */ + getBlockList(listType, options) { + return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } } +// Operation Specifications +const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); +const uploadOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobUploadHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobUploadExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + 
transactionalContentCrc64, + contentType1, + accept2, + blobType2, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer, +}; +const putBlobFromUrlOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobPutBlobFromUrlHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders, + }, + }, + queryParameters: [timeoutInSeconds], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + encryptionScope, + tier, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceIfTags, + copySource, + blobTagsString, + sourceContentMD5, + copySourceAuthorization, + copySourceTags, + transactionalContentMD5, + blobType2, + copySourceBlobProperties, + ], + isXML: true, + serializer: xmlSerializer, +}; +const stageBlockOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockExceptionHeaders, + }, + }, + requestBody: body1, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + transactionalContentMD5, + transactionalContentCrc64, + contentType1, + accept2, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "binary", + serializer: xmlSerializer, +}; 
+const stageBlockFromURLOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobStageBlockFromURLHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobStageBlockFromURLExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + comp24, + blockId, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + contentLength, + leaseId, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + encryptionScope, + sourceIfModifiedSince, + sourceIfUnmodifiedSince, + sourceIfMatch, + sourceIfNoneMatch, + sourceContentMD5, + copySourceAuthorization, + sourceUrl, + sourceContentCrc64, + sourceRange1, + ], + isXML: true, + serializer: xmlSerializer, +}; +const commitBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "PUT", + responses: { + 201: { + headersMapper: BlockBlobCommitBlockListHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobCommitBlockListExceptionHeaders, + }, + }, + requestBody: blocks, + queryParameters: [timeoutInSeconds, comp25], + urlParameters: [url], + headerParameters: [ + contentType, + accept, + version, + requestId, + metadata, + leaseId, + ifModifiedSince, + ifUnmodifiedSince, + encryptionKey, + encryptionKeySha256, + encryptionAlgorithm, + ifMatch, + ifNoneMatch, + ifTags, + blobCacheControl, + blobContentType, + blobContentMD5, + blobContentEncoding, + blobContentLanguage, + blobContentDisposition, + immutabilityPolicyExpiry, + immutabilityPolicyMode, + encryptionScope, + tier, + blobTagsString, + legalHold1, + transactionalContentMD5, + transactionalContentCrc64, + ], + isXML: true, + contentType: "application/xml; charset=utf-8", + mediaType: "xml", + serializer: xmlSerializer, +}; +const getBlockListOperationSpec = { + path: "/{containerName}/{blob}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: BlockList, + headersMapper: 
BlockBlobGetBlockListHeaders, + }, + default: { + bodyMapper: StorageError, + headersMapper: BlockBlobGetBlockListExceptionHeaders, + }, + }, + queryParameters: [ + timeoutInSeconds, + snapshot, + comp25, + listType, + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1, + leaseId, + ifTags, + ], + isXML: true, + serializer: xmlSerializer, +}; /* * Copyright (c) Microsoft Corporation. @@ -15198,16 +15174,15 @@ class StorageSharedKeyCredential extends Credential { * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ -const packageName = "azure-storage-blob"; -const packageVersion = "12.17.0"; -class StorageClientContext extends coreHttp__namespace.ServiceClient { +let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { /** - * Initializes a new instance of the StorageClientContext class. + * Initializes a new instance of the StorageClient class. * @param url The URL of the service account, container, or blob that is the target of the desired * operation. * @param options The parameter options */ constructor(url, options) { + var _a, _b; if (url === undefined) { throw new Error("'url' cannot be null"); } @@ -15215,21 +15190,48 @@ class StorageClientContext extends coreHttp__namespace.ServiceClient { if (!options) { options = {}; } - if (!options.userAgent) { - const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); - options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; - } - super(undefined, options); - this.requestContentType = "application/json; charset=utf-8"; - this.baseUri = options.endpoint || "{url}"; + const defaults = { + requestContentType: "application/json; charset=utf-8", + }; + const packageDetails = `azsdk-js-azure-storage-blob/12.23.0`; + const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix + ? 
`${options.userAgentOptions.userAgentPrefix} ${packageDetails}` + : `${packageDetails}`; + const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { + userAgentPrefix, + }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? _b : "{url}" }); + super(optionsWithDefaults); // Parameter assignments this.url = url; // Assigning values to Constant parameters this.version = options.version || "2023-11-03"; + this.service = new ServiceImpl(this); + this.container = new ContainerImpl(this); + this.blob = new BlobImpl(this); + this.pageBlob = new PageBlobImpl(this); + this.appendBlob = new AppendBlobImpl(this); + this.blockBlob = new BlockBlobImpl(this); + } +}; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * @internal + */ +class StorageContextClient extends StorageClient$1 { + async sendOperationRequest(operationArguments, operationSpec) { + const operationSpecToSend = Object.assign({}, operationSpec); + if (operationSpecToSend.path === "/{containerName}" || + operationSpecToSend.path === "/{containerName}/{blob}") { + operationSpecToSend.path = ""; + } + return super.sendOperationRequest(operationArguments, operationSpecToSend); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} * and etc. 
@@ -15245,20 +15247,9 @@ class StorageClient { this.url = escapeURLPath(url); this.accountName = getAccountNameFromUrl(url); this.pipeline = pipeline; - this.storageClientContext = new StorageClientContext(this.url, pipeline.toServiceClientOptions()); + this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); - this.credential = new AnonymousCredential(); - for (const factory of this.pipeline.factories) { - if ((coreHttp.isNode && factory instanceof StorageSharedKeyCredential) || - factory instanceof AnonymousCredential) { - this.credential = factory; - } - else if (coreHttp.isTokenCredential(factory.credential)) { - // Only works if the factory has been attached a "credential" property. - // We do that in newPipeline() when using TokenCredential. - this.credential = factory.credential; - } - } + this.credential = getCredentialFromPipeline(pipeline); // Override protocol layer's default content-type const storageClientContext = this.storageClientContext; storageClientContext.requestContentType = undefined; @@ -15266,29 +15257,16 @@ class StorageClient { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * Creates a span using the global tracer. * @internal */ -const createSpan = coreTracing.createSpanFunction({ - packagePrefix: "Azure.Storage.Blob", +const tracingClient = coreTracing.createTracingClient({ + packageName: "@azure/storage-blob", + packageVersion: SDK_VERSION, namespace: "Microsoft.Storage", }); -/** - * @internal - * - * Adapt the tracing options from OperationOptions to what they need to be for - * RequestOptionsBase (when we update to later OpenTelemetry versions this is now - * two separate fields, not just one). 
- */ -function convertTracingToRequestOptionsBase(options) { - var _a, _b; - return { - // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. - spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext, - }; -} // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -15707,6 +15685,7 @@ class ContainerSASPermissions { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -15749,6 +15728,7 @@ function ipRangeToString(ipRange) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * Protocols for generated SAS. */ @@ -15773,6 +15753,20 @@ exports.SASProtocol = void 0; * NOTE: Instances of this class are immutable. */ class SASQueryParameters { + /** + * Optional. IP range allowed for this SAS. + * + * @readonly + */ + get ipRange() { + if (this.ipRangeInner) { + return { + end: this.ipRangeInner.end, + start: this.ipRangeInner.start, + }; + } + return undefined; + } constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; @@ -15832,20 +15826,6 @@ class SASQueryParameters { } } } - /** - * Optional. IP range allowed for this SAS. 
- * - * @readonly - */ - get ipRange() { - if (this.ipRangeInner) { - return { - end: this.ipRangeInner.end, - start: this.ipRangeInner.start, - }; - } - return undefined; - } /** * Encodes all SAS query parameters into a string that can be appended to a URL. * @@ -15861,12 +15841,12 @@ class SASQueryParameters { "sip", "si", "ses", - "skoid", - "sktid", - "skt", - "ske", - "sks", - "skv", + "skoid", // Signed object ID + "sktid", // Signed tenant ID + "skt", // Signed key start time + "ske", // Signed key expiry time + "sks", // Signed key service + "skv", // Signed key version "sr", "sp", "sig", @@ -15980,6 +15960,7 @@ class SASQueryParameters { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential @@ -16365,7 +16346,7 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, + undefined, // agentObjectId blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", @@ -16444,7 +16425,7 @@ function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userD userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, - undefined, + undefined, // agentObjectId blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? 
ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", @@ -16526,47 +16507,48 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ class BlobLeaseClient { + /** + * Gets the lease Id. + * + * @readonly + */ + get leaseId() { + return this._leaseId; + } + /** + * Gets the url. + * + * @readonly + */ + get url() { + return this._url; + } /** * Creates an instance of BlobLeaseClient. * @param client - The client to make the lease operation requests. * @param leaseId - Initial proposed lease id. */ constructor(client, leaseId) { - const clientContext = new StorageClientContext(client.url, client.pipeline.toServiceClientOptions()); + const clientContext = client.storageClientContext; this._url = client.url; if (client.name === undefined) { this._isContainer = true; - this._containerOrBlobOperation = new Container(clientContext); + this._containerOrBlobOperation = clientContext.container; } else { this._isContainer = false; - this._containerOrBlobOperation = new Blob$1(clientContext); + this._containerOrBlobOperation = clientContext.blob; } if (!leaseId) { - leaseId = coreHttp.generateUuid(); + leaseId = coreUtil.randomUUID(); } this._leaseId = leaseId; } - /** - * Gets the lease Id. - * - * @readonly - */ - get leaseId() { - return this._leaseId; - } - /** - * Gets the url. - * - * @readonly - */ - get url() { - return this._url; - } /** * Establishes and manages a lock on a container for delete operations, or on a blob * for write and delete operations. @@ -16580,27 +16562,23 @@ class BlobLeaseClient { * @returns Response data for acquire lease operation. 
*/ async acquireLease(duration, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-acquireLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - return await this._containerOrBlobOperation.acquireLease(Object.assign({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }), proposedLeaseId: this._leaseId }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.acquireLease({ + abortSignal: options.abortSignal, + duration, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + proposedLeaseId: this._leaseId, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * To change the ID of the lease. 
@@ -16613,29 +16591,23 @@ class BlobLeaseClient { * @returns Response data for change lease operation. */ async changeLease(proposedLeaseId, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-changeLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - const response = await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); this._leaseId = proposedLeaseId; return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * To free the lease if it is no longer needed so that another client may @@ -16648,27 +16620,21 @@ class BlobLeaseClient { * @returns Response data for release lease operation. */ async releaseLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-releaseLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - return await this._containerOrBlobOperation.releaseLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { + var _a; + return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * To renew the lease. @@ -16680,27 +16646,21 @@ class BlobLeaseClient { * @returns Response data for renew lease operation. */ async renewLease(options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-renewLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - return await this._containerOrBlobOperation.renewLease(this._leaseId, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, + return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { + var _a; + return this._containerOrBlobOperation.renewLease(this._leaseId, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, }); - throw e; - } - finally { - span.end(); - } + }); } /** * To end the lease but ensure that another client cannot acquire a new lease @@ -16714,32 +16674,28 @@ class BlobLeaseClient { * @returns Response data for break lease operation. */ async breakLease(breakPeriod, options = {}) { - var _a, _b, _c, _d, _e, _f; - const { span, updatedOptions } = createSpan("BlobLeaseClient-breakLease", options); + var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } - try { - const operationOptions = Object.assign({ abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_f = options.conditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)); - return await this._containerOrBlobOperation.breakLease(operationOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { + var _a; + const operationOptions = { + abortSignal: options.abortSignal, + breakPeriod, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + }; + return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); + }); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -16763,8 +16719,8 @@ class RetriableReadableStream extends stream.Readable { if (this.options.doInjectErrorOnce) { this.options.doInjectErrorOnce = undefined; this.source.pause(); - this.source.removeAllListeners("data"); - this.source.emit("end"); + this.sourceErrorOrEndHandler(); + this.source.destroy(); return; } // console.log( @@ -16778,6 +16734,10 @@ class RetriableReadableStream extends stream.Readable { this.source.pause(); } }; + this.sourceAbortedHandler = () => { + const abortError = new abortController.AbortError("The operation was aborted."); + this.destroy(abortError); + }; this.sourceErrorOrEndHandler = (err) => { if (err && err.name === "AbortError") { this.destroy(err); @@ -16834,11 +16794,14 @@ class RetriableReadableStream extends stream.Readable { this.source.on("data", this.sourceDataHandler); this.source.on("end", this.sourceErrorOrEndHandler); this.source.on("error", this.sourceErrorOrEndHandler); + // needed for Node14 + this.source.on("aborted", this.sourceAbortedHandler); } removeSourceEventHandlers() { 
this.source.removeListener("data", this.sourceDataHandler); this.source.removeListener("end", this.sourceErrorOrEndHandler); this.source.removeListener("error", this.sourceErrorOrEndHandler); + this.source.removeListener("aborted", this.sourceAbortedHandler); } _destroy(error, callback) { // remove listener from source and release source @@ -16849,6 +16812,7 @@ class RetriableReadableStream extends stream.Readable { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -16860,19 +16824,6 @@ class RetriableReadableStream extends stream.Readable { * Readable stream. */ class BlobDownloadResponse { - /** - * Creates an instance of BlobDownloadResponse. - * - * @param originalResponse - - * @param getter - - * @param offset - - * @param count - - * @param options - - */ - constructor(originalResponse, getter, offset, count, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); - } /** * Indicates that the service supports * requests for partial file content. @@ -17298,7 +17249,7 @@ class BlobDownloadResponse { * @readonly */ get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; + return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** * The HTTP response. @@ -17306,6 +17257,19 @@ class BlobDownloadResponse { get _response() { return this.originalResponse._response; } + /** + * Creates an instance of BlobDownloadResponse. 
+ * + * @param originalResponse - + * @param getter - + * @param offset - + * @param count - + * @param options - + */ + constructor(originalResponse, getter, offset, count, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); + } } // Copyright (c) Microsoft Corporation. @@ -17645,7 +17609,14 @@ function arraysEqual(a, b) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. class AvroReader { + get blockOffset() { + return this._blockOffset; + } + get objectIndex() { + return this._objectIndex; + } constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { this._dataStream = dataStream; this._headerStream = headerStream || dataStream; @@ -17654,12 +17625,6 @@ class AvroReader { this._objectIndex = indexWithinCurrentBlock || 0; this._initialBlockOffset = currentBlockOffset || 0; } - get blockOffset() { - return this._blockOffset; - } - get objectIndex() { - return this._objectIndex; - } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { abortSignal: options.abortSignal, @@ -17703,8 +17668,8 @@ class AvroReader { hasNext() { return !this._initialized || this._itemsRemainingInBlock > 0; } - parseObjects(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* parseObjects_1() { + parseObjects() { + return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { if (!this._initialized) { yield tslib.__await(this.initialize(options)); } @@ -17749,19 +17714,20 @@ class AvroReadable { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); class AvroReadableFromStream extends AvroReadable { - constructor(readable) { - super(); - this._readable = readable; - this._position = 0; - } toUint8Array(data) { if (typeof data === "string") { return Buffer.from(data); } return data; } + constructor(readable) { + super(); + this._readable = readable; + this._position = 0; + } get position() { return this._position; } @@ -17830,6 +17796,7 @@ class AvroReadableFromStream extends AvroReadable { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -17938,6 +17905,7 @@ class BlobQuickQueryStream extends stream.Readable { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -17945,16 +17913,6 @@ class BlobQuickQueryStream extends stream.Readable { * parse avor data returned by blob query. */ class BlobQueryResponse { - /** - * Creates an instance of BlobQueryResponse. - * - * @param originalResponse - - * @param options - - */ - constructor(originalResponse, options = {}) { - this.originalResponse = originalResponse; - this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); - } /** * Indicates that the service supports * requests for partial file content. @@ -18291,7 +18249,7 @@ class BlobQueryResponse { * @readonly */ get readableStreamBody() { - return coreHttp.isNode ? this.blobDownloadStream : undefined; + return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** * The HTTP response. @@ -18299,9 +18257,20 @@ class BlobQueryResponse { get _response() { return this.originalResponse._response; } + /** + * Creates an instance of BlobQueryResponse. 
+ * + * @param originalResponse - + * @param options - + */ + constructor(originalResponse, options = {}) { + this.originalResponse = originalResponse; + this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); + } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * Represents the access tier on a blob. * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} @@ -18406,6 +18375,10 @@ exports.StorageBlobAudience = void 0; */ StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; })(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); +/** + * + * To get OAuth audience for a storage account for blob service. + */ function getBlobServiceAccountAudience(storageAccountName) { return `https://${storageAccountName}.blob.core.windows.net/.default`; } @@ -18435,6 +18408,7 @@ function rangeResponseFromModel(response) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * This is the poller returned by {@link BlobClient.beginCopyFromURL}. * This can not be instantiated directly outside of this package. @@ -18458,7 +18432,7 @@ class BlobBeginCopyFromUrlPoller extends coreLro.Poller { this.intervalInMs = intervalInMs; } delay() { - return coreHttp.delay(this.intervalInMs); + return coreUtil.delay(this.intervalInMs); } } /** @@ -18583,6 +18557,9 @@ function rangeToString(iRange) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +// In browser, during webpack or browserify bundling, this module will be replaced by 'events' +// https://github.com/Gozala/events /** * States for Batch. */ @@ -18701,6 +18678,7 @@ class Batch { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
/** * This class generates a readable stream from the data in an array of buffers. */ @@ -18782,11 +18760,8 @@ class BuffersStream extends stream.Readable { } // Copyright (c) Microsoft Corporation. -/** - * maxBufferLength is max size of each buffer in the pooled buffers. - */ -// Can't use import as Typescript doesn't recognize "buffer". -const maxBufferLength = require("buffer").constants.MAX_LENGTH; +// Licensed under the MIT license. +const maxBufferLength = buffer.constants.MAX_LENGTH; /** * This class provides a buffer container which conceptually has no hard size limit. * It accepts a capacity, an array of input buffers and the total length of input data. @@ -18796,6 +18771,12 @@ const maxBufferLength = require("buffer").constants.MAX_LENGTH; * assembled from all the data in the internal "buffer". */ class PooledBuffer { + /** + * The size of the data contained in the pooled buffers. + */ + get size() { + return this._size; + } constructor(capacity, buffers, totalLength) { /** * Internal buffers used to keep the data. @@ -18817,12 +18798,6 @@ class PooledBuffer { this.fill(buffers, totalLength); } } - /** - * The size of the data contained in the pooled buffers. - */ - get size() { - return this._size; - } /** * Fill the internal buffers with data in the input buffers serially * with respect to the total length and the total capacity of the internal buffers. @@ -18867,6 +18842,7 @@ class PooledBuffer { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * This class accepts a Node.js Readable stream as input, and keeps reading data * from the stream into the internal buffer structure, until it reaches maxBuffers. @@ -19116,6 +19092,7 @@ class BufferScheduler { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * Reads a readable stream into buffer. Fill the buffer from offset to end. 
* @@ -19224,11 +19201,25 @@ async function readStreamToLocalFile(rs, file) { const fsStat = util__namespace.promisify(fs__namespace.stat); const fsCreateReadStream = fs__namespace.createReadStream; +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, * append blob, or page blob. */ class BlobClient extends StorageClient { + /** + * The name of the blob. + */ + get name() { + return this._name; + } + /** + * The name of the storage container the blob is associated with. + */ + get containerName() { + return this._containerName; + } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ @@ -19241,9 +19232,9 @@ class BlobClient extends StorageClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; @@ -19268,11 +19259,11 @@ class BlobClient extends StorageClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new 
StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -19297,22 +19288,10 @@ class BlobClient extends StorageClient { super(url, pipeline); ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); - this.blobContext = new Blob$1(this.storageClientContext); + this.blobContext = this.storageClientContext.blob; this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } - /** - * The name of the blob. - */ - get name() { - return this._name; - } - /** - * The name of the storage container the blob is associated with. - */ - get containerName() { - return this._containerName; - } /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. @@ -19414,18 +19393,28 @@ class BlobClient extends StorageClient { * ``` */ async download(offset = 0, count, options = {}) { - var _a; options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlobClient-download", options); - try { - const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream - }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.download({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { + onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream + }, + range: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), + rangeGetContentMD5: options.rangeGetContentMD5, + rangeGetContentCRC64: options.rangeGetContentCrc64, + snapshot: options.snapshot, + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately - if (!coreHttp.isNode) { + if (!coreUtil.isNode) { return wrappedRes; } // We support retrying when download stream unexpected ends in Node.js runtime @@ -19474,17 +19463,7 @@ class BlobClient extends StorageClient { maxRetryRequests: options.maxRetryRequests, onProgress: options.onProgress, }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns true if the Azure blob resource represented by this client exists; false otherwise. @@ -19496,37 +19475,31 @@ class BlobClient extends StorageClient { * @param options - options to Exists operation. 
*/ async exists(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-exists", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - await this.getProperties({ - abortSignal: options.abortSignal, - customerProvidedKey: options.customerProvidedKey, - conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - // Expected exception when checking blob existence - return false; - } - else if (e.statusCode === 409 && - (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || - e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { - // Expected exception when checking blob existence + return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { + try { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + await this.getProperties({ + abortSignal: options.abortSignal, + customerProvidedKey: options.customerProvidedKey, + conditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + }); return true; } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (e.statusCode === 404) { + // Expected exception when checking blob existence + return false; + } + else if (e.statusCode === 409 && + (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || + e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { + // Expected exception when checking blob existence + return true; + } + throw e; + } + }); } /** * Returns all user-defined metadata, standard HTTP properties, and system properties @@ -19541,24 +19514,19 @@ class BlobClient extends StorageClient { * @param options - Optional options to Get Properties operation. 
*/ async getProperties(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getProperties", options); - try { - options.conditions = options.conditions || {}; - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const res = await this.blobContext.getProperties(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + options.conditions = options.conditions || {}; + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blobContext.getProperties({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted @@ -19570,22 +19538,17 @@ class BlobClient extends StorageClient { * @param options - Optional options to Blob Delete operation. 
*/ async delete(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-delete", options); options.conditions = options.conditions || {}; - try { - return await this.blobContext.delete(Object.assign({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.delete({ + abortSignal: options.abortSignal, + deleteSnapshots: options.deleteSnapshots, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted @@ -19597,29 +19560,19 @@ class BlobClient extends StorageClient { * @param options - Optional options to Blob Delete operation. */ async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "BlobNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.delete(updatedOptions)); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Restores the contents and metadata of soft deleted blob and any associated @@ -19630,20 +19583,12 @@ class BlobClient extends StorageClient { * @param options - Optional options to Blob Undelete operation. */ async undelete(options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-undelete", options); - try { - return await this.blobContext.undelete(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.undelete({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets system properties on the blob. 
@@ -19661,23 +19606,19 @@ class BlobClient extends StorageClient { * @param options - Optional options to Blob Set HTTP Headers operation. */ async setHTTPHeaders(blobHTTPHeaders, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setHTTPHeaders", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setHttpHeaders(Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setHttpHeaders({ + abortSignal: options.abortSignal, + blobHttpHeaders: blobHTTPHeaders, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets user-defined metadata for the specified blob as one or more name-value pairs. @@ -19691,23 +19632,20 @@ class BlobClient extends StorageClient { * @param options - Optional options to Set Metadata operation. 
*/ async setMetadata(metadata, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setMetadata", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets tags on the underlying blob. 
@@ -19719,21 +19657,16 @@ class BlobClient extends StorageClient { * @param options - */ async setTags(tags, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setTags", options); - try { - return await this.blobContext.setTags(Object.assign(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions)), { tags: toBlobTags(tags) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + tags: toBlobTags(tags), + })); + }); } /** * Gets the tags associated with the underlying blob. @@ -19741,23 +19674,17 @@ class BlobClient extends StorageClient { * @param options - */ async getTags(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-getTags", options); - try { - const response = await this.blobContext.getTags(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.blobContext.getTags({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Get a {@link BlobLeaseClient} that manages leases on the blob. @@ -19775,23 +19702,20 @@ class BlobClient extends StorageClient { * @param options - Optional options to the Blob Create Snapshot operation. */ async createSnapshot(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-createSnapshot", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blobContext.createSnapshot(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.createSnapshot({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Asynchronously copies a blob to a destination within the storage account. @@ -19893,20 +19817,13 @@ class BlobClient extends StorageClient { * @param options - Optional options to the Blob Abort Copy From URL operation. 
*/ async abortCopyFromURL(copyId, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-abortCopyFromURL", options); - try { - return await this.blobContext.abortCopyFromURL(copyId, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.abortCopyFromURL(copyId, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not @@ -19917,28 +19834,33 @@ class BlobClient extends StorageClient { * @param options - */ async syncCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-syncCopyFromURL", options); options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.copyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f, _g; + return assertResponse(await this.blobContext.copyFromURL(copySource, { + abortSignal: options.abortSignal, + metadata: options.metadata, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince, + }, + sourceContentMD5: options.sourceContentMD5, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, + immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, + legalHold: options.legalHold, + encryptionScope: options.encryptionScope, + copySourceTags: options.copySourceTags, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets the tier on a blob. The operation is allowed on a page blob in a premium @@ -19952,23 +19874,19 @@ class BlobClient extends StorageClient { * @param options - Optional options to the Blob Set Tier operation. */ async setAccessTier(tier, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobClient-setAccessTier", options); - try { - return await this.blobContext.setTier(toAccessTier(tier), Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.blobContext.setTier(toAccessTier(tier), { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + rehydratePriority: options.rehydratePriority, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } async downloadToBuffer(param1, param2, param3, param4 = {}) { + var _a; let buffer; let offset = 0; let count = 0; @@ -19983,29 +19901,26 @@ class BlobClient extends StorageClient { count = typeof param2 === "number" ? param2 : 0; options = param3 || {}; } - const { span, updatedOptions } = createSpan("BlobClient-downloadToBuffer", options); - try { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0) { - throw new RangeError("blockSize option must be >= 0"); - } - if (options.blockSize === 0) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; - } - if (offset < 0) { - throw new RangeError("offset option must be >= 0"); - } - if (count && count <= 0) { - throw new RangeError("count option must be greater than 0"); - } - if (!options.conditions) { - options.conditions = {}; - } + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? 
_a : 0; + if (blockSize < 0) { + throw new RangeError("blockSize option must be >= 0"); + } + if (blockSize === 0) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + } + if (offset < 0) { + throw new RangeError("offset option must be >= 0"); + } + if (count && count <= 0) { + throw new RangeError("count option must be greater than 0"); + } + if (!options.conditions) { + options.conditions = {}; + } + return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { // Customer doesn't specify length, get it if (!count) { - const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); count = response.contentLength - offset; if (count < 0) { throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); @@ -20025,19 +19940,19 @@ class BlobClient extends StorageClient { } let transferProgress = 0; const batch = new Batch(options.concurrency); - for (let off = offset; off < offset + count; off = off + options.blockSize) { + for (let off = offset; off < offset + count; off = off + blockSize) { batch.addOperation(async () => { // Exclusive chunk end position let chunkEnd = offset + count; - if (off + options.blockSize < chunkEnd) { - chunkEnd = off + options.blockSize; + if (off + blockSize < chunkEnd) { + chunkEnd = off + blockSize; } const response = await this.download(off, chunkEnd - off, { abortSignal: options.abortSignal, conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), + tracingOptions: 
updatedOptions.tracingOptions, }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); @@ -20052,17 +19967,7 @@ class BlobClient extends StorageClient { } await batch.do(); return buffer; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -20081,26 +19986,15 @@ class BlobClient extends StorageClient { * at the specified path. */ async downloadToFile(filePath, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlobClient-downloadToFile", options); - try { - const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); + return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { + const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); if (response.readableStreamBody) { await readStreamToLocalFile(response.readableStreamBody, filePath); } // The stream is no longer accessible so setting it to undefined. 
response.blobDownloadStream = undefined; return response; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } getBlobAndContainerNamesFromUrl() { let containerName; @@ -20113,11 +20007,11 @@ class BlobClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` // http://localhost:10001/devstoreaccount1/containername/blob - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { + const parsedUrl = new URL(this.url); + if (parsedUrl.host.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername/blob". // .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -20125,14 +20019,14 @@ class BlobClient extends StorageClient { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob // .getPath() -> /devstoreaccount1/containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)/([^/]*)(/(.*))?"); + const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; } else { // "https://customdomain.com/containername/blob". 
// .getPath() -> /containername/blob - const pathComponents = parsedUrl.getPath().match("/([^/]*)(/(.*))?"); + const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } @@ -20165,29 +20059,32 @@ class BlobClient extends StorageClient { * @param options - Optional options to the Blob Start Copy From URL operation. */ async startCopyFromURL(copySource, options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("BlobClient-startCopyFromURL", options); - options.conditions = options.conditions || {}; - options.sourceConditions = options.sourceConditions || {}; - try { - return await this.blobContext.startCopyFromURL(copySource, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { + var _a, _b, _c; + options.conditions = options.conditions || {}; + options.sourceConditions = options.sourceConditions || {}; + return assertResponse(await this.blobContext.startCopyFromURL(copySource, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, sourceIfTags: options.sourceConditions.tagConditions, - }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + rehydratePriority: options.rehydratePriority, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + sealBlob: options.sealBlob, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Only available for BlobClient constructed with a shared key credential. @@ -20214,63 +20111,38 @@ class BlobClient extends StorageClient { * * @param options - Optional options to delete immutability policy on the blob. */ - async deleteImmutabilityPolicy(options) { - const { span, updatedOptions } = createSpan("BlobClient-deleteImmutabilityPolicy", options); - try { - return await this.blobContext.deleteImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? 
void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async deleteImmutabilityPolicy(options = {}) { + return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** - * Set immutablility policy on the blob. + * Set immutability policy on the blob. * * @param options - Optional options to set immutability policy on the blob. */ - async setImmutabilityPolicy(immutabilityPolicy, options) { - const { span, updatedOptions } = createSpan("BlobClient-setImmutabilityPolicy", options); - try { - return await this.blobContext.setImmutabilityPolicy(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, modifiedAccessConditions: options === null || options === void 0 ? void 0 : options.modifiedAccessCondition }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async setImmutabilityPolicy(immutabilityPolicy, options = {}) { + return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setImmutabilityPolicy({ + immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, + immutabilityPolicyMode: immutabilityPolicy.policyMode, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Set legal hold on the blob. * * @param options - Optional options to set legal hold on the blob. 
*/ - async setLegalHold(legalHoldEnabled, options) { - const { span, updatedOptions } = createSpan("BlobClient-setLegalHold", options); - try { - return await this.blobContext.setLegalHold(legalHoldEnabled, Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async setLegalHold(legalHoldEnabled, options = {}) { + return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { + return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } /** @@ -20291,9 +20163,9 @@ class AppendBlobClient extends BlobClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; url = urlOrConnectionString; options = blobNameOrOptions; @@ -20315,11 +20187,11 @@ class AppendBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, 
extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -20342,7 +20214,7 @@ class AppendBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.appendBlobContext = new AppendBlob(this.storageClientContext); + this.appendBlobContext = this.storageClientContext.appendBlob; } /** * Creates a new AppendBlobClient object identical to the source but with the @@ -20370,23 +20242,25 @@ class AppendBlobClient extends BlobClient { * ``` */ async create(options = {}) { - var _a, _b, _c; - const { span, updatedOptions } = createSpan("AppendBlobClient-create", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.create(0, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.appendBlobContext.create(0, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. @@ -20396,30 +20270,20 @@ class AppendBlobClient extends BlobClient { * @param options - */ async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("AppendBlobClient-createIfNotExists", options); const conditions = { ifNoneMatch: ETagAny }; - try { - const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? 
void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Seals the append blob, making it read only. @@ -20427,22 +20291,17 @@ class AppendBlobClient extends BlobClient { * @param options - */ async seal(options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-seal", options); options.conditions = options.conditions || {}; - try { - return await this.appendBlobContext.seal(Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.seal({ + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Commits a new block of data to the end of the existing append blob. @@ -20469,25 +20328,25 @@ class AppendBlobClient extends BlobClient { * ``` */ async appendBlock(body, contentLength, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlock", options); options.conditions = options.conditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, { + abortSignal: options.abortSignal, + appendPositionAccessConditions: options.conditions, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Append Block operation commits a new block of data to the end of an existing append blob @@ -20504,29 +20363,31 @@ class AppendBlobClient extends BlobClient { * @param options - */ async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("AppendBlobClient-appendBlockFromURL", options); options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, 
Object.assign({ abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { + abortSignal: options.abortSignal, + sourceRange: rangeToString({ offset: sourceOffset, count }), + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + appendPositionAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? 
void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + }, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } /** @@ -20547,9 +20408,9 @@ class BlockBlobClient extends BlobClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; @@ -20574,11 +20435,11 @@ class BlockBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + 
options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -20601,8 +20462,8 @@ class BlockBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.blockBlobContext = new BlockBlob(this.storageClientContext); - this._blobContext = new Blob$1(this.storageClientContext); + this.blockBlobContext = this.storageClientContext.blockBlob; + this._blobContext = this.storageClientContext.blob; } /** * Creates a new BlockBlobClient object identical to the source but with the @@ -20646,36 +20507,31 @@ class BlockBlobClient extends BlobClient { * @param options - */ async query(query, options = {}) { - var _a; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); - try { - if (!coreHttp.isNode) { - throw new Error("This operation currently is only supported in Node.js."); - } - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { + if (!coreUtil.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } + return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this._blobContext.query({ + abortSignal: options.abortSignal, + queryRequest: { queryType: "SQL", expression: query, inputSerialization: toQuerySerialization(options.inputTextConfiguration), outputSerialization: toQuerySerialization(options.outputTextConfiguration), - }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); + }, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + tracingOptions: updatedOptions.tracingOptions, + })); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, onError: options.onError, }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Creates a new block blob, or updates the content of an existing block blob. @@ -20705,25 +20561,29 @@ class BlockBlobClient extends BlobClient { * ``` */ async upload(body, contentLength, options = {}) { - var _a, _b, _c; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-upload", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.upload(contentLength, body, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + requestOptions: { onUploadProgress: options.onProgress, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Creates a new Block Blob where the contents of the blob are read from a given URL. 
@@ -20744,29 +20604,18 @@ class BlockBlobClient extends BlobClient { * @param options - Optional parameters. */ async syncUploadFromURL(sourceURL, options = {}) { - var _a, _b, _c, _d, _e; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-syncUploadFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: options.conditions.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: (_a = options.sourceConditions) === null || _a === void 0 ? void 0 : _a.ifMatch, - sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, - sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, - sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, - }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e, _f; + return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, + sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? 
void 0 : _f.tagConditions, + }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * Uploads the specified block to the block blob's "staging area" to be later @@ -20780,23 +20629,21 @@ class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Stage Block operation. */ async stageBlock(blockId, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlock", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + requestOptions: { onUploadProgress: options.onProgress, - }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + 
tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Stage Block From URL operation creates a new block to be committed as part @@ -20820,21 +20667,20 @@ class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Stage Block From URL operation. */ async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-stageBlockFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { + return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + sourceRange: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Writes a blob by specifying the list of block IDs that make up the blob. @@ -20849,23 +20695,26 @@ class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Commit Block List operation. */ async commitBlockList(blocks, options = {}) { - var _a, _b, _c; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("BlockBlobClient-commitBlockList", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.blockBlobContext.commitBlockList({ latest: blocks }, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns the list of blocks that have been uploaded as part of a block blob @@ -20878,10 +20727,14 @@ class BlockBlobClient extends BlobClient { * @returns Response data for the Block Blob Get Block List operation. 
*/ async getBlockList(listType, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlockBlobClient-getBlockList", options); - try { - const res = await this.blockBlobContext.getBlockList(listType, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { + var _a; + const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); if (!res.committedBlocks) { res.committedBlocks = []; } @@ -20889,17 +20742,7 @@ class BlockBlobClient extends BlobClient { res.uncommittedBlocks = []; } return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } // High level functions /** @@ -20918,9 +20761,8 @@ class BlockBlobClient extends BlobClient { * @param options - */ async uploadData(data, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); - try { - if (coreHttp.isNode) { + return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { + if (coreUtil.isNode) { let buffer; if (data instanceof Buffer) { buffer = data; @@ -20938,17 +20780,7 @@ class BlockBlobClient extends BlobClient { const browserBlob = new Blob([data]); return this.uploadSeekableInternal((offset, size) => 
browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); } - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * ONLY AVAILABLE IN BROWSERS. @@ -20969,22 +20801,11 @@ class BlockBlobClient extends BlobClient { * @param options - Options to upload browser data. * @returns Response data for the Blob Upload operation. */ - async uploadBrowserData(browserData, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadBrowserData", options); - try { - const browserBlob = new Blob([browserData]); - return await this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + async uploadBrowserData(browserData, options = {}) { + return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { + const browserBlob = new Blob([browserData]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + }); } /** * @@ -21002,27 +20823,23 @@ class BlockBlobClient extends BlobClient { * @returns Response data for the Blob Upload operation. */ async uploadSeekableInternal(bodyFactory, size, options = {}) { - if (!options.blockSize) { - options.blockSize = 0; - } - if (options.blockSize < 0 || options.blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { + var _a, _b; + let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? 
_a : 0; + if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } - if (options.maxSingleShotSize !== 0 && !options.maxSingleShotSize) { - options.maxSingleShotSize = BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; - } - if (options.maxSingleShotSize < 0 || - options.maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { + const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; + if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } - if (options.blockSize === 0) { + if (blockSize === 0) { if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`${size} is too larger to upload to a block blob.`); } - if (size > options.maxSingleShotSize) { - options.blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); - if (options.blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { - options.blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; + if (size > maxSingleShotSize) { + blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); + if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { + blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } } @@ -21032,25 +20849,24 @@ class BlockBlobClient extends BlobClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadSeekableInternal", options); - try { - if (size <= options.maxSingleShotSize) { - return await this.upload(bodyFactory(0, size), size, updatedOptions); + return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { + if (size <= maxSingleShotSize) { + return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); } - const numBlocks = Math.floor((size - 1) / 
options.blockSize) + 1; + const numBlocks = Math.floor((size - 1) / blockSize) + 1; if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); } const blockList = []; - const blockIDPrefix = coreHttp.generateUuid(); + const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const batch = new Batch(options.concurrency); for (let i = 0; i < numBlocks; i++) { batch.addOperation(async () => { const blockID = generateBlockID(blockIDPrefix, i); - const start = options.blockSize * i; - const end = i === numBlocks - 1 ? size : start + options.blockSize; + const start = blockSize * i; + const end = i === numBlocks - 1 ? size : start + blockSize; const contentLength = end - start; blockList.push(blockID); await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { @@ -21071,17 +20887,7 @@ class BlockBlobClient extends BlobClient { } await batch.do(); return this.commitBlockList(blockList, updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -21097,27 +20903,16 @@ class BlockBlobClient extends BlobClient { * @returns Response data for the Blob Upload operation. */ async uploadFile(filePath, options = {}) { - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadFile", options); - try { + return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { const size = (await fsStat(filePath)).size; - return await this.uploadSeekableInternal((offset, count) => { + return this.uploadSeekableInternal((offset, count) => { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? 
offset + count - 1 : Infinity, start: offset, }); - }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); + }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. @@ -21142,10 +20937,9 @@ class BlockBlobClient extends BlobClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("BlockBlobClient-uploadStream", options); - try { + return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { let blockNum = 0; - const blockIDPrefix = coreHttp.generateUuid(); + const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const blockList = []; const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { @@ -21169,18 +20963,8 @@ class BlockBlobClient extends BlobClient { // Outgoing queue shouldn't be empty. 
Math.ceil((maxConcurrency / 4) * 3)); await scheduler.do(); - return await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); + }); } } /** @@ -21201,9 +20985,9 @@ class PageBlobClient extends BlobClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; @@ -21225,11 +21009,11 @@ class PageBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); 
+ options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -21252,7 +21036,7 @@ class PageBlobClient extends BlobClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - this.pageBlobContext = new PageBlob(this.storageClientContext); + this.pageBlobContext = this.storageClientContext.pageBlob; } /** * Creates a new PageBlobClient object identical to the source but with the @@ -21275,23 +21059,27 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Create operation. */ async create(size, options = {}) { - var _a, _b, _c; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-create", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.create(0, size, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { + var _a, _b, _c; + return assertResponse(await this.pageBlobContext.create(0, size, { + abortSignal: options.abortSignal, + blobHttpHeaders: options.blobHTTPHeaders, + blobSequenceNumber: options.blobSequenceNumber, + leaseAccessConditions: options.conditions, + metadata: options.metadata, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, + immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, + legalHold: options.legalHold, + tier: toAccessTier(options.tier), + blobTagsString: toBlobTagsString(options.tags), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Creates a page blob of the specified length. 
Call uploadPages to upload data @@ -21303,30 +21091,20 @@ class PageBlobClient extends BlobClient { * @param options - */ async createIfNotExists(size, options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("PageBlobClient-createIfNotExists", options); - try { - const conditions = { ifNoneMatch: ETagAny }; - const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const conditions = { ifNoneMatch: ETagAny }; + const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. 
@@ -21339,25 +21117,26 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Upload Pages operation. */ async uploadPages(body, offset, count, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPages", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.uploadPages(count, body, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + requestOptions: { onUploadProgress: options.onProgress, - }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }, + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + transactionalContentMD5: options.transactionalContentMD5, + transactionalContentCrc64: options.transactionalContentCrc64, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Upload Pages operation writes a range of pages to a page blob where the @@ -21371,29 +21150,30 @@ class PageBlobClient extends BlobClient { * @param options - */ async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { - var _a; options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-uploadPagesFromURL", options); - try { - ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); - return await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), Object.assign({ abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: 
(_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { - sourceIfMatch: options.sourceConditions.ifMatch, - sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, - sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); + return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { + var _a, _b, _c, _d, _e; + return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { + abortSignal: options.abortSignal, + sourceContentMD5: options.sourceContentMD5, + sourceContentCrc64: options.sourceContentCrc64, + leaseAccessConditions: options.conditions, + sequenceNumberAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + sourceModifiedAccessConditions: { + sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, + sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, + sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, + sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince, + }, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Frees the specified pages from the page blob. @@ -21405,22 +21185,20 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Clear Pages operation. */ async clearPages(offset = 0, count, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-clearPages", options); - try { - return await this.pageBlobContext.clearPages(0, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.clearPages(0, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + sequenceNumberAccessConditions: options.conditions, + cpkInfo: options.customerProvidedKey, + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns the list of valid page ranges for a page blob or snapshot of a page blob. @@ -21432,24 +21210,18 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Ranges operation. */ async getPageRanges(offset = 0, count, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRanges", options); - try { - return await this.pageBlobContext - .getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } /** * getPageRangesSegment returns a single segment of page ranges starting from the @@ -21464,21 +21236,18 @@ class PageBlobClient extends BlobClient { * @param options - Options to PageBlob Get Page Ranges Segment operation. */ async listPageRangesSegment(offset = 0, count, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesSegment", options); - try { - return await this.pageBlobContext.getPageRanges(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRanges({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + range: rangeToString({ offset, count }), + marker: marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} @@ -21494,8 +21263,8 @@ class PageBlobClient extends BlobClient { * items. 
The marker value is opaque to the client. * @param options - Options to List Page Ranges operation. */ - listPageRangeItemSegments(offset = 0, count, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1() { + listPageRangeItemSegments() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { let getPageRangeItemSegmentsResponse; if (!!marker || marker === undefined) { do { @@ -21513,20 +21282,22 @@ class PageBlobClient extends BlobClient { * @param count - Number of bytes to get. * @param options - Options to List Page Ranges operation. */ - listPageRangeItems(offset = 0, count, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1() { - var e_1, _a; + listPageRangeItems() { + return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -21639,24 +21410,19 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiff", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { + var _a; + const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshot, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(result); + }); } /** * getPageRangesDiffSegment returns a single segment of page ranges starting from the @@ -21672,25 +21438,23 @@ class PageBlobClient extends BlobClient { * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. 
*/ - async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-getPageRangesDiffSegment", options); - try { - return await this.pageBlobContext.getPageRangesDiff(Object.assign({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ + async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { + return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, + leaseAccessConditions: options === null || options === void 0 ? void 0 : options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevsnapshot: prevSnapshotOrUrl, + range: rangeToString({ offset: offset, count: count, - }), marker: marker, maxPageSize: options === null || options === void 0 ? 
void 0 : options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }), + marker: marker, + maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} @@ -21730,18 +21494,20 @@ class PageBlobClient extends BlobClient { */ listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { - var e_2, _a; + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const getPageRangesSegment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const getPageRangesSegment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -21855,24 +21621,19 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Get Page Range Diff operation. 
*/ async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options); - try { - return await this.pageBlobContext - .getPageRangesDiff(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }) }, convertTracingToRequestOptionsBase(updatedOptions))) - .then(rangeResponseFromModel); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + prevSnapshotUrl, + range: rangeToString({ offset, count }), + tracingOptions: updatedOptions.tracingOptions, + })); + return rangeResponseFromModel(response); + }); } /** * Resizes the page blob to the specified size (which must be a multiple of 512). @@ -21883,22 +21644,17 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Resize operation. 
*/ async resize(size, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-resize", options); - try { - return await this.pageBlobContext.resize(size, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.resize(size, { + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + encryptionScope: options.encryptionScope, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets a page blob's sequence number. @@ -21910,22 +21666,17 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Update Sequence Number operation. 
*/ async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { - var _a; options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("PageBlobClient-updateSequenceNumber", options); - try { - return await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, Object.assign({ abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { + abortSignal: options.abortSignal, + blobSequenceNumber: sequenceNumber, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. @@ -21941,25 +21692,19 @@ class PageBlobClient extends BlobClient { * @returns Response data for the Page Blob Copy Incremental operation. 
*/ async startCopyIncremental(copySource, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("PageBlobClient-startCopyIncremental", options); - try { - return await this.pageBlobContext.copyIncremental(copySource, Object.assign({ abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { + var _a; + return assertResponse(await this.pageBlobContext.copyIncremental(copySource, { + abortSignal: options.abortSignal, + modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), + tracingOptions: updatedOptions.tracingOptions, + })); + }); } } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. async function getBodyAsText(batchResponse) { let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); @@ -21972,6 +21717,7 @@ function utf8ByteLength(str) { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
const HTTP_HEADER_DELIMITER = ": "; const SPACE_DELIMITER = " "; const NOT_FOUND = -1; @@ -22021,7 +21767,7 @@ class BatchResponseParser { for (let index = 0; index < subResponseCount; index++) { const subResponse = subResponses[index]; const deserializedSubResponse = {}; - deserializedSubResponse.headers = new coreHttp.HttpHeaders(); + deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; @@ -22168,6 +21914,7 @@ Mutex.keys = {}; Mutex.listeners = {}; // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * A BlobBatch represents an aggregated set of operations on blobs. * Currently, only `delete` and `setAccessTier` are supported. @@ -22220,9 +21967,9 @@ class BlobBatch { let url; let credential; if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || + ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || credentialOrOptions instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrOptions))) { + coreAuth.isTokenCredential(credentialOrOptions))) { // First overload url = urlOrBlobClient; credential = credentialOrOptions; @@ -22239,8 +21986,7 @@ class BlobBatch { if (!options) { options = {}; } - const { span, updatedOptions } = createSpan("BatchDeleteRequest-addSubRequest", options); - try { + return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, @@ -22248,26 +21994,16 @@ class BlobBatch { }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw 
e; - } - finally { - span.end(); - } + }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { let url; let credential; let tier; if (typeof urlOrBlobClient === "string" && - ((coreHttp.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || + ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || credentialOrTier instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrTier))) { + coreAuth.isTokenCredential(credentialOrTier))) { // First overload url = urlOrBlobClient; credential = credentialOrTier; @@ -22286,8 +22022,7 @@ class BlobBatch { if (!options) { options = {}; } - const { span, updatedOptions } = createSpan("BatchSetTierRequest-addSubRequest", options); - try { + return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, @@ -22295,17 +22030,7 @@ class BlobBatch { }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } } /** @@ -22316,7 +22041,7 @@ class InnerBatchRequest { constructor() { this.operationCount = 0; this.body = ""; - const tempGuid = coreHttp.generateUuid(); + const tempGuid = coreUtil.randomUUID(); // batch_{batchid} this.boundary = `batch_${tempGuid}`; // --batch_{batchid} @@ -22337,29 +22062,48 @@ class InnerBatchRequest { * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. 
*/ createPipeline(credential) { - const isAnonymousCreds = credential instanceof AnonymousCredential; - const policyFactoryLength = 3 + (isAnonymousCreds ? 0 : 1); // [deserializationPolicy, BatchHeaderFilterPolicyFactory, (Optional)Credential, BatchRequestAssemblePolicyFactory] - const factories = new Array(policyFactoryLength); - factories[0] = coreHttp.deserializationPolicy(); // Default deserializationPolicy is provided by protocol layer - factories[1] = new BatchHeaderFilterPolicyFactory(); // Use batch header filter policy to exclude unnecessary headers - if (!isAnonymousCreds) { - factories[2] = coreHttp.isTokenCredential(credential) - ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) - : credential; - } - factories[policyFactoryLength - 1] = new BatchRequestAssemblePolicyFactory(this); // Use batch assemble policy to assemble request and intercept request from going to wire - return new Pipeline(factories, {}); + const corePipeline = coreRestPipeline.createEmptyPipeline(); + corePipeline.addPolicy(coreClient.serializationPolicy({ + stringifyXML: coreXml.stringifyXML, + serializerOptions: { + xml: { + xmlCharKey: "#", + }, + }, + }), { phase: "Serialize" }); + // Use batch header filter policy to exclude unnecessary headers + corePipeline.addPolicy(batchHeaderFilterPolicy()); + // Use batch assemble policy to assemble request and intercept request from going to wire + corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); + if (coreAuth.isTokenCredential(credential)) { + corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ + credential, + scopes: StorageOAuthScopes, + challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, + }), { phase: "Sign" }); + } + else if (credential instanceof StorageSharedKeyCredential) { + corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ + accountName: credential.accountName, + 
accountKey: credential.accountKey, + }), { phase: "Sign" }); + } + const pipeline = new Pipeline([]); + // attach the v2 pipeline to this one + pipeline._credential = credential; + pipeline._corePipeline = corePipeline; + return pipeline; } appendSubRequestToBody(request) { // Start to assemble sub request this.body += [ - this.subRequestPrefix, - `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, - "", + this.subRequestPrefix, // sub request constant prefix + `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID + "", // empty line after sub request's content ID `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); - for (const header of request.headers.headersArray()) { - this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; + for (const [name, value] of request.headers) { + this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; } this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line // No body to assemble for current batch request support @@ -22390,55 +22134,39 @@ class InnerBatchRequest { return this.subRequests; } } -class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { - constructor(batchRequest, nextPolicy, options) { - super(nextPolicy, options); - this.dummyResponse = { - request: new coreHttp.WebResource(), - status: 200, - headers: new coreHttp.HttpHeaders(), - }; - this.batchRequest = batchRequest; - } - async sendRequest(request) { - await this.batchRequest.appendSubRequestToBody(request); - return this.dummyResponse; // Intercept request from going to wire - } -} -class BatchRequestAssemblePolicyFactory { - constructor(batchRequest) { - this.batchRequest = batchRequest; - } - create(nextPolicy, options) { - return new BatchRequestAssemblePolicy(this.batchRequest, nextPolicy, options); - } +function batchRequestAssemblePolicy(batchRequest) 
{ + return { + name: "batchRequestAssemblePolicy", + async sendRequest(request) { + batchRequest.appendSubRequestToBody(request); + return { + request, + status: 200, + headers: coreRestPipeline.createHttpHeaders(), + }; + }, + }; } -class BatchHeaderFilterPolicy extends coreHttp.BaseRequestPolicy { - // The base class has a protected constructor. Adding a public one to enable constructing of this class. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(request) { - let xMsHeaderName = ""; - for (const header of request.headers.headersArray()) { - if (iEqual(header.name, HeaderConstants.X_MS_VERSION)) { - xMsHeaderName = header.name; +function batchHeaderFilterPolicy() { + return { + name: "batchHeaderFilterPolicy", + async sendRequest(request, next) { + let xMsHeaderName = ""; + for (const [name] of request.headers) { + if (iEqual(name, HeaderConstants.X_MS_VERSION)) { + xMsHeaderName = name; + } } - } - if (xMsHeaderName !== "") { - request.headers.remove(xMsHeaderName); // The subrequests should not have the x-ms-version header. - } - return this._nextPolicy.sendRequest(request); - } -} -class BatchHeaderFilterPolicyFactory { - create(nextPolicy, options) { - return new BatchHeaderFilterPolicy(nextPolicy, options); - } + if (xMsHeaderName !== "") { + request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. + } + return next(request); + }, + }; } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. 
* @@ -22460,14 +22188,14 @@ class BlobBatchClient { else { pipeline = newPipeline(credentialOrPipeline, options); } - const storageClientContext = new StorageClientContext(url, pipeline.toServiceClientOptions()); + const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); const path = getURLPath(url); if (path && path !== "/") { // Container scoped. - this.serviceOrContainerContext = new Container(storageClientContext); + this.serviceOrContainerContext = storageClientContext.container; } else { - this.serviceOrContainerContext = new Service(storageClientContext); + this.serviceOrContainerContext = storageClientContext.service; } } /** @@ -22546,11 +22274,10 @@ class BlobBatchClient { if (!batchRequest || batchRequest.getSubRequests().size === 0) { throw new RangeError("Batch request should contain one or more sub requests."); } - const { span, updatedOptions } = createSpan("BlobBatchClient-submitBatch", options); - try { + return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. - const rawBatchResponse = await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); + const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). 
const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); @@ -22566,17 +22293,7 @@ class BlobBatchClient { subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } } @@ -22584,6 +22301,12 @@ class BlobBatchClient { * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ class ContainerClient extends StorageClient { + /** + * The name of the container. + */ + get containerName() { + return this._containerName; + } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ @@ -22596,9 +22319,9 @@ class ContainerClient extends StorageClient { url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } - else if ((coreHttp.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || + else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipelineOrContainerName)) { + coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); @@ -22616,11 +22339,11 @@ class ContainerClient extends StorageClient { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = 
extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } @@ -22644,13 +22367,7 @@ class ContainerClient extends StorageClient { } super(url, pipeline); this._containerName = this.getContainerNameFromUrl(); - this.containerContext = new Container(this.storageClientContext); - } - /** - * The name of the container. - */ - get containerName() { - return this._containerName; + this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. If the container with @@ -22670,22 +22387,9 @@ class ContainerClient extends StorageClient { * ``` */ async create(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-create", options); - try { - // Spread operator in destructuring assignments, - // this will filter out unwanted properties from the response object into result object - return await this.containerContext.create(Object.assign(Object.assign({}, options), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.create(updatedOptions)); + }); } /** * Creates a new container under the specified account. 
If the container with @@ -22696,29 +22400,21 @@ class ContainerClient extends StorageClient { * @param options - */ async createIfNotExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); - try { - const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.create(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); + } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + else { + throw e; + } } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns true if the Azure container resource represented by this client exists; false otherwise. 
@@ -22730,31 +22426,21 @@ class ContainerClient extends StorageClient { * @param options - */ async exists(options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-exists", options); - try { - await this.getProperties({ - abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions, - }); - return true; - } - catch (e) { - if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence", + return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { + try { + await this.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, }); - return false; + return true; } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (e.statusCode === 404) { + return false; + } + throw e; + } + }); } /** * Creates a {@link BlobClient} @@ -22815,20 +22501,9 @@ class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-getProperties", options); - try { - return await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * Marks the specified container for deletion. 
The container and any blobs @@ -22841,20 +22516,14 @@ class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-delete", options); - try { - return await this.containerContext.delete(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.delete({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Marks the specified container for deletion if it exists. The container and any blobs @@ -22864,29 +22533,19 @@ class ContainerClient extends StorageClient { * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { - var _a, _b; - const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); - try { - const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); - } - catch (e) { - if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists.", - }); - return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); + return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { + var _a, _b; + try { + const res = await this.delete(updatedOptions); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + catch (e) { + if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { + return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); + } + throw e; + } + }); } /** * Sets one or more user-defined name-value pairs for the specified container. @@ -22907,20 +22566,15 @@ class ContainerClient extends StorageClient { if (options.conditions.ifUnmodifiedSince) { throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } - const { span, updatedOptions } = createSpan("ContainerClient-setMetadata", options); - try { - return await this.containerContext.setMetadata(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { + return assertResponse(await this.containerContext.setMetadata({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + metadata, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Gets the permissions for the 
specified container. The permissions indicate @@ -22937,9 +22591,12 @@ class ContainerClient extends StorageClient { if (!options.conditions) { options.conditions = {}; } - const { span, updatedOptions } = createSpan("ContainerClient-getAccessPolicy", options); - try { - const response = await this.containerContext.getAccessPolicy(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.getAccessPolicy({ + abortSignal: options.abortSignal, + leaseAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); const res = { _response: response._response, blobPublicAccess: response.blobPublicAccess, @@ -22971,17 +22628,7 @@ class ContainerClient extends StorageClient { }); } return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Sets the permissions for the specified container. 
The permissions indicate @@ -23002,8 +22649,7 @@ class ContainerClient extends StorageClient { */ async setAccessPolicy(access, containerAcl, options = {}) { options.conditions = options.conditions || {}; - const { span, updatedOptions } = createSpan("ContainerClient-setAccessPolicy", options); - try { + return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; for (const identifier of containerAcl || []) { acl.push({ @@ -23019,18 +22665,15 @@ class ContainerClient extends StorageClient { id: identifier.id, }); } - return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return assertResponse(await this.containerContext.setAccessPolicy({ + abortSignal: options.abortSignal, + access, + containerAcl: acl, + leaseAccessConditions: options.conditions, + modifiedAccessConditions: options.conditions, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Get a {@link BlobLeaseClient} that manages leases on the container. @@ -23064,25 +22707,14 @@ class ContainerClient extends StorageClient { * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. 
*/ async uploadBlockBlob(blobName, body, contentLength, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-uploadBlockBlob", options); - try { + return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, response, }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted @@ -23096,24 +22728,13 @@ class ContainerClient extends StorageClient { * @returns Block blob deletion response data. */ async deleteBlob(blobName, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-deleteBlob", options); - try { + return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) { blobClient = blobClient.withVersion(options.versionId); } - return await blobClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return blobClient.delete(updatedOptions); + }); } /** * listBlobFlatSegment returns a single segment of blobs starting from the @@ -23126,25 +22747,14 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Flat Segment operation. 
*/ async listBlobFlatSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); - try { - const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - 
message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * listBlobHierarchySegment returns a single segment of blobs starting from @@ -23158,29 +22768,18 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. */ async listBlobHierarchySegment(delimiter, marker, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); - try { - const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { + var _a; + const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { + const 
blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }) }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse @@ -23194,8 +22793,8 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listBlobsFlatSegmentResponse; if (!!marker || marker === undefined) { do { @@ -23211,20 +22810,22 @@ class ContainerClient extends StorageClient { * * @param options - Options to list blobs operation. 
*/ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_1, _a; + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsFlatSegmentResponse = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsFlatSegmentResponse = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -23372,8 +22973,8 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ - listHierarchySegments(delimiter, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1() { + listHierarchySegments(delimiter_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) { let listBlobsHierarchySegmentResponse; if (!!marker || marker === undefined) { do { @@ -23390,13 +22991,15 @@ class ContainerClient extends StorageClient { * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. 
*/ - listItemsByHierarchy(delimiter, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1() { - var e_2, _a; + listItemsByHierarchy(delimiter_1) { + return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const listBlobsHierarchySegmentResponse = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const listBlobsHierarchySegmentResponse = _c; const segment = listBlobsHierarchySegmentResponse.segment; if (segment.blobPrefixes) { for (const prefix of segment.blobPrefixes) { @@ -23411,7 +23014,7 @@ class ContainerClient extends StorageClient { catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -23575,9 +23178,14 @@ class ContainerClient extends StorageClient { * @param options - Options to find blobs by tags. 
*/ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); - try { - const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.containerContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; @@ -23587,17 +23195,7 @@ class ContainerClient extends StorageClient { return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. @@ -23615,8 +23213,8 @@ class ContainerClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. 
*/ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { @@ -23637,20 +23235,22 @@ class ContainerClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_3, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_3, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_3_1) { e_3 = { error: e_3_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_3) throw e_3.error; } } @@ -23766,23 +23366,23 @@ class ContainerClient extends StorageClient { // "https://myaccount.blob.core.windows.net/mycontainer"; // IPv4/IPv6 address hosts, Endpoints - 
`http://127.0.0.1:10000/devstoreaccount1/containername` // http://localhost:10001/devstoreaccount1/containername - const parsedUrl = coreHttp.URLBuilder.parse(this.url); - if (parsedUrl.getHost().split(".")[1] === "blob") { + const parsedUrl = new URL(this.url); + if (parsedUrl.hostname.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername". // "https://customdomain.com/containername". // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; + containerName = parsedUrl.pathname.split("/")[1]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername // .getPath() -> /devstoreaccount1/containername - containerName = parsedUrl.getPath().split("/")[2]; + containerName = parsedUrl.pathname.split("/")[2]; } else { // "https://customdomain.com/containername". // .getPath() -> /containername - containerName = parsedUrl.getPath().split("/")[1]; + containerName = parsedUrl.pathname.split("/")[1]; } // decode the encoded containerName - to get all the special characters that might be present in it containerName = decodeURIComponent(containerName); @@ -23849,7 +23449,7 @@ class AccountSASPermissions { */ this.write = false; /** - * Permission to create blobs and files granted. + * Permission to delete blobs and files granted. */ this.delete = false; /** @@ -24207,6 +23807,7 @@ class AccountSASServices { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * @@ -24296,26 +23897,6 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC * to manipulate blob containers. */ class BlobServiceClient extends StorageClient { - constructor(url, credentialOrPipeline, - // Legacy, no fix for eslint error without breaking. 
Disable it for this interface. - /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ - options) { - let pipeline; - if (isPipelineLike(credentialOrPipeline)) { - pipeline = credentialOrPipeline; - } - else if ((coreHttp.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || - credentialOrPipeline instanceof AnonymousCredential || - coreHttp.isTokenCredential(credentialOrPipeline)) { - pipeline = newPipeline(credentialOrPipeline, options); - } - else { - // The second parameter is undefined. Use anonymous credential - pipeline = newPipeline(new AnonymousCredential(), options); - } - super(url, pipeline); - this.serviceContext = new Service(this.storageClientContext); - } /** * * Creates an instance of BlobServiceClient from connection string. @@ -24335,10 +23916,10 @@ class BlobServiceClient extends StorageClient { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { - if (coreHttp.isNode) { + if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); if (!options.proxyOptions) { - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); @@ -24355,6 +23936,26 @@ class BlobServiceClient extends StorageClient { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } + constructor(url, credentialOrPipeline, + // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
+ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ + options) { + let pipeline; + if (isPipelineLike(credentialOrPipeline)) { + pipeline = credentialOrPipeline; + } + else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || + credentialOrPipeline instanceof AnonymousCredential || + coreAuth.isTokenCredential(credentialOrPipeline)) { + pipeline = newPipeline(credentialOrPipeline, options); + } + else { + // The second parameter is undefined. Use anonymous credential + pipeline = newPipeline(new AnonymousCredential(), options); + } + super(url, pipeline); + this.serviceContext = this.storageClientContext.service; + } /** * Creates a {@link ContainerClient} object * @@ -24378,25 +23979,14 @@ class BlobServiceClient extends StorageClient { * @returns Container creation response and the corresponding container client. */ async createContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-createContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, containerCreateResponse, }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Deletes a Blob container. @@ -24406,21 +23996,10 @@ class BlobServiceClient extends StorageClient { * @returns Container deletion response. 
*/ async deleteContainer(containerName, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-deleteContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); - return await containerClient.delete(updatedOptions); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return containerClient.delete(updatedOptions); + }); } /** * Restore a previously deleted Blob container. @@ -24432,25 +24011,17 @@ class BlobServiceClient extends StorageClient { * @returns Container deletion response. */ async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-undeleteContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerUndeleteResponse = await containerContext.restore(Object.assign({ deletedContainerName, - deletedContainerVersion }, updatedOptions)); + const containerContext = containerClient["storageClientContext"].container; + const containerUndeleteResponse = assertResponse(await containerContext.restore({ + deletedContainerName, + deletedContainerVersion, + tracingOptions: updatedOptions.tracingOptions, + })); return { containerClient, containerUndeleteResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Rename an existing Blob Container. 
@@ -24462,25 +24033,14 @@ class BlobServiceClient extends StorageClient { /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. async renameContainer(sourceContainerName, destinationContainerName, options = {}) { - var _a; - const { span, updatedOptions } = createSpan("BlobServiceClient-renameContainer", options); - try { + return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { + var _a; const containerClient = this.getContainerClient(destinationContainerName); // Hack to access a protected member. - const containerContext = new Container(containerClient["storageClientContext"]); - const containerRenameResponse = await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId })); + const containerContext = containerClient["storageClientContext"].container; + const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); return { containerClient, containerRenameResponse }; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Gets the properties of a storage account’s Blob service, including properties @@ -24491,20 +24051,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Properties operation. 
*/ async getProperties(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getProperties", options); - try { - return await this.serviceContext.getProperties(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getProperties({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Sets properties for a storage account’s Blob service endpoint, including properties @@ -24516,20 +24068,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Set Properties operation. */ async setProperties(properties, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-setProperties", options); - try { - return await this.serviceContext.setProperties(properties, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.setProperties(properties, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Retrieves statistics related to replication for the Blob service. It is only @@ -24541,20 +24085,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Statistics operation. 
*/ async getStatistics(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getStatistics", options); - try { - return await this.serviceContext.getStatistics(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getStatistics({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * The Get Account Information operation returns the sku name and account kind @@ -24567,20 +24103,12 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getAccountInfo", options); - try { - return await this.serviceContext.getAccountInfo(Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.getAccountInfo({ + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); + }); } /** * Returns a list of the containers under the specified account. @@ -24597,20 +24125,9 @@ class BlobServiceClient extends StorageClient { * @returns Response data for the Service List Container Segment operation. 
*/ async listContainersSegment(marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-listContainersSegment", options); - try { - return await this.serviceContext.listContainersSegment(Object.assign(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include }), convertTracingToRequestOptionsBase(updatedOptions))); - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { + return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); + }); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags @@ -24631,9 +24148,14 @@ class BlobServiceClient extends StorageClient { * @param options - Options to find blobs by tags. 
*/ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-findBlobsByTagsSegment", options); - try { - const response = await this.serviceContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.filterBlobs({ + abortSignal: options.abortSignal, + where: tagFilterSqlExpression, + marker, + maxPageSize: options.maxPageSize, + tracingOptions: updatedOptions.tracingOptions, + })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; @@ -24643,17 +24165,7 @@ class BlobServiceClient extends StorageClient { return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. @@ -24671,8 +24183,8 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. 
*/ - findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { @@ -24693,20 +24205,22 @@ class BlobServiceClient extends StorageClient { * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ - findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { - var e_1, _a; + findBlobsByTagsItems(tagFilterSqlExpression_1) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { + var _a, e_1, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } @@ -24828,8 +24342,8 @@ class BlobServiceClient extends StorageClient { * items. The marker value is opaque to the client. * @param options - Options to list containers operation. 
*/ - listSegments(marker, options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listSegments_1() { + listSegments(marker_1) { + return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listContainersSegmentResponse; if (!!marker || marker === undefined) { do { @@ -24847,20 +24361,22 @@ class BlobServiceClient extends StorageClient { * * @param options - Options to list containers operation. */ - listItems(options = {}) { - return tslib.__asyncGenerator(this, arguments, function* listItems_1() { - var e_2, _a; + listItems() { + return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { + var _a, e_2, _b, _c; let marker; try { - for (var _b = tslib.__asyncValues(this.listSegments(marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { - const segment = _c.value; + for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { + _c = _f.value; + _d = false; + const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { - if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } @@ -24990,12 +24506,14 @@ class BlobServiceClient extends StorageClient { * @param expiresOn - The end time for the user delegation SAS. 
Must be within 7 days of the current time */ async getUserDelegationKey(startsOn, expiresOn, options = {}) { - const { span, updatedOptions } = createSpan("BlobServiceClient-getUserDelegationKey", options); - try { - const response = await this.serviceContext.getUserDelegationKey({ + return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { + const response = assertResponse(await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), expiresOn: truncatedISO8061Date(expiresOn, false), - }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); + }, { + abortSignal: options.abortSignal, + tracingOptions: updatedOptions.tracingOptions, + })); const userDelegationKey = { signedObjectId: response.signedObjectId, signedTenantId: response.signedTenantId, @@ -25007,17 +24525,7 @@ class BlobServiceClient extends StorageClient { }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; - } - catch (e) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: e.message, - }); - throw e; - } - finally { - span.end(); - } + }); } /** * Creates a BlobBatchClient object to conduct batch operations. 
@@ -25066,29 +24574,9 @@ exports.KnownEncryptionAlgorithmType = void 0; KnownEncryptionAlgorithmType["AES256"] = "AES256"; })(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); -Object.defineProperty(exports, 'BaseRequestPolicy', { - enumerable: true, - get: function () { return coreHttp.BaseRequestPolicy; } -}); -Object.defineProperty(exports, 'HttpHeaders', { - enumerable: true, - get: function () { return coreHttp.HttpHeaders; } -}); -Object.defineProperty(exports, 'RequestPolicyOptions', { - enumerable: true, - get: function () { return coreHttp.RequestPolicyOptions; } -}); -Object.defineProperty(exports, 'RestError', { - enumerable: true, - get: function () { return coreHttp.RestError; } -}); -Object.defineProperty(exports, 'WebResource', { - enumerable: true, - get: function () { return coreHttp.WebResource; } -}); -Object.defineProperty(exports, 'deserializationPolicy', { +Object.defineProperty(exports, "RestError", { enumerable: true, - get: function () { return coreHttp.deserializationPolicy; } + get: function () { return coreRestPipeline.RestError; } }); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; @@ -25096,6 +24584,7 @@ exports.AccountSASServices = AccountSASServices; exports.AnonymousCredential = AnonymousCredential; exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; exports.AppendBlobClient = AppendBlobClient; +exports.BaseRequestPolicy = BaseRequestPolicy; exports.BlobBatch = BlobBatch; exports.BlobBatchClient = BlobBatchClient; exports.BlobClient = BlobClient; diff --git a/node_modules/@azure/storage-blob/package.json b/node_modules/@azure/storage-blob/package.json index cb8d8afdc..f3cd3ce28 100644 --- a/node_modules/@azure/storage-blob/package.json +++ b/node_modules/@azure/storage-blob/package.json @@ -1,7 +1,7 @@ { "name": "@azure/storage-blob", "sdk-type": "client", - "version": "12.17.0", + "version": "12.23.0", "description": 
"Microsoft Azure Storage SDK for JavaScript - Blob", "main": "./dist/index.js", "module": "./dist-esm/storage-blob/src/index.js", @@ -9,6 +9,7 @@ "./dist-esm/storage-blob/src/index.js": "./dist-esm/storage-blob/src/index.browser.js", "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.js": "./dist-esm/storage-blob/src/credentials/StorageSharedKeyCredential.browser.js", "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.js": "./dist-esm/storage-blob/src/credentials/UserDelegationKeyCredential.browser.js", + "./dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.js": "./dist-esm/storage-blob/src/policies/StorageSharedKeyCredentialPolicyV2.browser.js", "./dist-esm/storage-blob/src/utils/utils.node.js": "./dist-esm/storage-blob/src/utils/utils.browser.js", "./dist-esm/storage-blob/test/utils/index.js": "./dist-esm/storage-blob/test/utils/index.browser.js", "./dist-esm/storage-blob/src/BatchUtils.js": "./dist-esm/storage-blob/src/BatchUtils.browser.js", @@ -24,32 +25,24 @@ "./dist/index.js": "./dist-esm/storage-blob/src/index.js" }, "types": "./types/latest/storage-blob.d.ts", - "typesVersions": { - "<3.6": { - "*": [ - "./types/3.1/storage-blob.d.ts" - ] - } - }, "engines": { - "node": ">=14.0.0" + "node": ">=18.0.0" }, "scripts": { "audit": "node ../../../common/scripts/rush-audit.js && rimraf node_modules package-lock.json && npm i --package-lock-only 2>&1 && npm audit", "build:browser": "tsc -p . && dev-tool run bundle", "build:node": "tsc -p . && dev-tool run bundle", "build:test": "tsc -p . && dev-tool run bundle", - "build:types": "downlevel-dts types/latest types/3.1", - "build": "npm run clean && tsc -p . && dev-tool run bundle && api-extractor run --local && npm run build:types", + "build": "npm run clean && tsc -p . 
&& dev-tool run bundle && dev-tool run extract-api", "build:samples": "echo Obsolete;", - "check-format": "prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", - "clean": "rimraf dist dist-* types temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml", + "check-format": "dev-tool run vendored prettier --list-different --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "clean": "rimraf --glob dist dist-* types temp statistics.html coverage coverage-browser .nyc_output *.tgz *.log test*.xml TEST*.xml", "clean:samples": "rimraf samples/v12/javascript/node_modules samples/v12/typescript/node_modules samples/v12/typescript/dist samples/v12/typescript/package-lock.json samples/v12/javascript/package-lock.json", - "extract-api": "tsc -p . && api-extractor run --local", + "extract-api": "tsc -p . 
&& dev-tool run extract-api", "execute:samples": "dev-tool samples run samples-dev", - "format": "prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", - "integration-test:browser": "karma start --single-run", - "integration-test:node": "nyc mocha -r esm --require source-map-support/register --reporter ../../../common/tools/mocha-multi-reporter.js --full-trace -t 300000 \"dist-esm/storage-blob/test/*.spec.js\" \"dist-esm/storage-blob/test/node/*.spec.js\"", + "format": "dev-tool run vendored prettier --write --config ../../../.prettierrc.json --ignore-path ../../../.prettierignore \"src/**/*.ts\" \"test/**/*.ts\" \"samples-dev/**/*.ts\" \"*.{js,json}\"", + "integration-test:browser": "dev-tool run test:browser", + "integration-test:node": "dev-tool run test:node-js-input -- --timeout 5000000 'dist-esm/storage-blob/test/*.spec.js' 'dist-esm/storage-blob/test/node/*.spec.js'", "integration-test": "npm run integration-test:node && npm run integration-test:browser", "generate:client": "autorest --typescript ./swagger/README.md", "lint:fix": "eslint package.json api-extractor.json README.md src test --ext .ts,.javascript,.js --fix", @@ -58,8 +51,8 @@ "test:browser": "npm run clean && npm run build:test && npm run unit-test:browser", "test:node": "npm run clean && npm run build:test && npm run unit-test:node", "test": "npm run clean && npm run build:test && npm run unit-test", - "unit-test:browser": "karma start --single-run", - "unit-test:node": "mocha -r esm --require ts-node/register --reporter ../../../common/tools/mocha-multi-reporter.js --timeout 1200000 --full-trace \"test/{,!(browser)/**/}*.spec.ts\"", + "unit-test:browser": "dev-tool run test:browser", + "unit-test:node": "dev-tool run test:node-ts-input -- --timeout 1200000 --exclude 'test/**/browser/*.spec.ts' 'test/**/*.spec.ts'", "unit-test": "npm run unit-test:node && npm run unit-test:browser", 
"emulator-tests": "cross-env STORAGE_CONNECTION_STRING=UseDevelopmentStorage=true && npm run test:node" }, @@ -70,7 +63,6 @@ "dist-esm/storage-internal-avro/src/", "dist-esm/storage-common/src/", "types/latest/storage-blob.d.ts", - "types/3.1/storage-blob.d.ts", "README.md", "LICENSE" ], @@ -132,10 +124,15 @@ }, "dependencies": { "@azure/abort-controller": "^1.0.0", - "@azure/core-http": "^3.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-client": "^1.6.2", + "@azure/core-http-compat": "^2.0.0", "@azure/core-lro": "^2.2.0", "@azure/core-paging": "^1.1.1", - "@azure/core-tracing": "1.0.0-preview.13", + "@azure/core-rest-pipeline": "^1.10.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.6.1", + "@azure/core-xml": "^1.3.2", "@azure/logger": "^1.0.0", "events": "^3.0.0", "tslib": "^2.2.0" @@ -143,45 +140,37 @@ "devDependencies": { "@azure/dev-tool": "^1.0.0", "@azure/eslint-plugin-azure-sdk": "^3.0.0", - "@azure/identity": "^2.0.1", - "@azure/test-utils": "^1.0.0", - "@azure-tools/test-recorder": "^1.0.0", - "@azure/test-utils-perf": "^1.0.0", + "@azure/identity": "^4.0.1", + "@azure-tools/test-utils": "^1.0.1", + "@azure-tools/test-credential": "^1.0.0", + "@azure-tools/test-recorder": "^3.0.0", + "@azure-tools/test-perf": "^1.0.0", "@microsoft/api-extractor": "^7.31.1", "@types/chai": "^4.1.6", - "@types/mocha": "^7.0.2", - "@types/node": "^14.0.0", - "@types/node-fetch": "^2.5.0", + "@types/mocha": "^10.0.0", + "@types/node": "^18.0.0", "chai": "^4.2.0", "cross-env": "^7.0.2", "dotenv": "^16.0.0", - "downlevel-dts": "^0.10.0", "es6-promise": "^4.2.5", "eslint": "^8.0.0", - "esm": "^3.2.18", "inherits": "^2.0.3", "karma": "^6.2.0", "karma-chrome-launcher": "^3.0.0", "karma-coverage": "^2.0.0", - "karma-edge-launcher": "^0.4.2", "karma-env-preprocessor": "^0.1.1", "karma-firefox-launcher": "^1.1.0", - "karma-ie-launcher": "^1.0.0", - "karma-json-preprocessor": "^0.3.3", - "karma-json-to-file-reporter": "^1.0.1", "karma-junit-reporter": "^2.0.1", 
"karma-mocha": "^2.0.1", "karma-mocha-reporter": "^2.2.5", "karma-sourcemap-loader": "^0.3.8", - "mocha": "^7.1.1", - "mocha-junit-reporter": "^2.0.0", - "nyc": "^15.0.0", - "prettier": "^2.5.1", - "puppeteer": "^19.2.2", - "rimraf": "^3.0.0", + "mocha": "^10.0.0", + "nyc": "^15.1.0", + "puppeteer": "^22.2.0", + "rimraf": "^5.0.5", "source-map-support": "^0.5.9", "ts-node": "^10.0.0", - "typescript": "~4.8.0", + "typescript": "~5.4.5", "util": "^0.12.1" } } diff --git a/node_modules/@formatjs/ecma402-abstract/NumberFormat/format_to_parts.js b/node_modules/@formatjs/ecma402-abstract/NumberFormat/format_to_parts.js index 49dfab82f..c21847369 100644 --- a/node_modules/@formatjs/ecma402-abstract/NumberFormat/format_to_parts.js +++ b/node_modules/@formatjs/ecma402-abstract/NumberFormat/format_to_parts.js @@ -1,8 +1,8 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); +var regex_generated_1 = require("../regex.generated"); var ToRawFixed_1 = require("./ToRawFixed"); var digit_mapping_generated_1 = require("./digit-mapping.generated"); -var regex_generated_1 = require("../regex.generated"); // This is from: unicode-12.1.0/General_Category/Symbol/regex.js // IE11 does not support unicode flag, otherwise this is just /\p{S}/u. // /^\p{S}/u @@ -119,7 +119,7 @@ function formatToParts(numberResult, data, pl, options) { // We only need to handle scientific and engineering notation here. numberParts.push.apply(numberParts, paritionNumberIntoParts(symbols, numberResult, notation, exponent, numberingSystem, // If compact number pattern exists, do not insert group separators. - !compactNumberPattern && Boolean(options.useGrouping), decimalNumberPattern)); + !compactNumberPattern && Boolean(options.useGrouping), decimalNumberPattern, style)); break; } case '-': @@ -262,7 +262,7 @@ function paritionNumberIntoParts(symbols, numberResult, notation, exponent, numb * A typical value looks like "#,##0.00" (primary group size is 3). 
* Some locales like Hindi has secondary group size of 2 (e.g. "#,##,##0.00"). */ -decimalNumberPattern) { +decimalNumberPattern, style) { var result = []; // eslint-disable-next-line prefer-const var n = numberResult.formattedString, x = numberResult.roundedNumber; @@ -295,7 +295,11 @@ decimalNumberPattern) { // NumberFormat('de', {notation: 'compact', compactDisplay: 'short'}).format(1234) //=> "1234" // NumberFormat('de').format(1234) //=> "1.234" if (useGrouping && (notation !== 'compact' || x >= 10000)) { - var groupSepSymbol = symbols.group; + // a. Let groupSepSymbol be the implementation-, locale-, and numbering system-dependent (ILND) String representing the grouping separator. + // For currency we should use `currencyGroup` instead of generic `group` + var groupSepSymbol = style === 'currency' && symbols.currencyGroup != null + ? symbols.currencyGroup + : symbols.group; var groups = []; // > There may be two different grouping sizes: The primary grouping size used for the least // > significant integer group, and the secondary grouping size used for more significant groups. @@ -339,7 +343,10 @@ decimalNumberPattern) { } // #endregion if (fraction !== undefined) { - result.push({ type: 'decimal', value: symbols.decimal }, { type: 'fraction', value: fraction }); + var decimalSepSymbol = style === 'currency' && symbols.currencyDecimal != null + ? 
symbols.currencyDecimal + : symbols.decimal; + result.push({ type: 'decimal', value: decimalSepSymbol }, { type: 'fraction', value: fraction }); } if ((notation === 'scientific' || notation === 'engineering') && isFinite(x)) { diff --git a/node_modules/@formatjs/ecma402-abstract/index.js b/node_modules/@formatjs/ecma402-abstract/index.js index f49d25f40..fcfda9896 100644 --- a/node_modules/@formatjs/ecma402-abstract/index.js +++ b/node_modules/@formatjs/ecma402-abstract/index.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.invariant = exports.isMissingLocaleDataError = exports.defineProperty = exports.getMagnitude = exports.setMultiInternalSlots = exports.setInternalSlot = exports.isLiteralPart = exports.getMultiInternalSlots = exports.getInternalSlot = exports._formatToParts = void 0; +exports.invariant = exports.isMissingLocaleDataError = exports.setMultiInternalSlots = exports.setInternalSlot = exports.isLiteralPart = exports.getMultiInternalSlots = exports.getMagnitude = exports.getInternalSlot = exports.defineProperty = exports.createDataProperty = exports._formatToParts = void 0; var tslib_1 = require("tslib"); tslib_1.__exportStar(require("./CanonicalizeLocaleList"), exports); tslib_1.__exportStar(require("./CanonicalizeTimeZoneName"), exports); @@ -36,21 +36,22 @@ Object.defineProperty(exports, "_formatToParts", { enumerable: true, get: functi tslib_1.__exportStar(require("./PartitionPattern"), exports); tslib_1.__exportStar(require("./SupportedLocales"), exports); var utils_1 = require("./utils"); +Object.defineProperty(exports, "createDataProperty", { enumerable: true, get: function () { return utils_1.createDataProperty; } }); +Object.defineProperty(exports, "defineProperty", { enumerable: true, get: function () { return utils_1.defineProperty; } }); Object.defineProperty(exports, "getInternalSlot", { enumerable: true, get: function () { return utils_1.getInternalSlot; } }); 
+Object.defineProperty(exports, "getMagnitude", { enumerable: true, get: function () { return utils_1.getMagnitude; } }); Object.defineProperty(exports, "getMultiInternalSlots", { enumerable: true, get: function () { return utils_1.getMultiInternalSlots; } }); Object.defineProperty(exports, "isLiteralPart", { enumerable: true, get: function () { return utils_1.isLiteralPart; } }); Object.defineProperty(exports, "setInternalSlot", { enumerable: true, get: function () { return utils_1.setInternalSlot; } }); Object.defineProperty(exports, "setMultiInternalSlots", { enumerable: true, get: function () { return utils_1.setMultiInternalSlots; } }); -Object.defineProperty(exports, "getMagnitude", { enumerable: true, get: function () { return utils_1.getMagnitude; } }); -Object.defineProperty(exports, "defineProperty", { enumerable: true, get: function () { return utils_1.defineProperty; } }); +tslib_1.__exportStar(require("./262"), exports); var data_1 = require("./data"); Object.defineProperty(exports, "isMissingLocaleDataError", { enumerable: true, get: function () { return data_1.isMissingLocaleDataError; } }); -tslib_1.__exportStar(require("./types/relative-time"), exports); tslib_1.__exportStar(require("./types/date-time"), exports); +tslib_1.__exportStar(require("./types/displaynames"), exports); tslib_1.__exportStar(require("./types/list"), exports); -tslib_1.__exportStar(require("./types/plural-rules"), exports); tslib_1.__exportStar(require("./types/number"), exports); -tslib_1.__exportStar(require("./types/displaynames"), exports); +tslib_1.__exportStar(require("./types/plural-rules"), exports); +tslib_1.__exportStar(require("./types/relative-time"), exports); var utils_2 = require("./utils"); Object.defineProperty(exports, "invariant", { enumerable: true, get: function () { return utils_2.invariant; } }); -tslib_1.__exportStar(require("./262"), exports); diff --git a/node_modules/@formatjs/ecma402-abstract/lib/NumberFormat/format_to_parts.js 
b/node_modules/@formatjs/ecma402-abstract/lib/NumberFormat/format_to_parts.js index 924d38b99..e4cee55ee 100644 --- a/node_modules/@formatjs/ecma402-abstract/lib/NumberFormat/format_to_parts.js +++ b/node_modules/@formatjs/ecma402-abstract/lib/NumberFormat/format_to_parts.js @@ -1,6 +1,6 @@ +import { S_UNICODE_REGEX } from '../regex.generated'; import { ToRawFixed } from './ToRawFixed'; import { digitMapping } from './digit-mapping.generated'; -import { S_UNICODE_REGEX } from '../regex.generated'; // This is from: unicode-12.1.0/General_Category/Symbol/regex.js // IE11 does not support unicode flag, otherwise this is just /\p{S}/u. // /^\p{S}/u @@ -117,7 +117,7 @@ export default function formatToParts(numberResult, data, pl, options) { // We only need to handle scientific and engineering notation here. numberParts.push.apply(numberParts, paritionNumberIntoParts(symbols, numberResult, notation, exponent, numberingSystem, // If compact number pattern exists, do not insert group separators. - !compactNumberPattern && Boolean(options.useGrouping), decimalNumberPattern)); + !compactNumberPattern && Boolean(options.useGrouping), decimalNumberPattern, style)); break; } case '-': @@ -259,7 +259,7 @@ function paritionNumberIntoParts(symbols, numberResult, notation, exponent, numb * A typical value looks like "#,##0.00" (primary group size is 3). * Some locales like Hindi has secondary group size of 2 (e.g. "#,##,##0.00"). */ -decimalNumberPattern) { +decimalNumberPattern, style) { var result = []; // eslint-disable-next-line prefer-const var n = numberResult.formattedString, x = numberResult.roundedNumber; @@ -292,7 +292,11 @@ decimalNumberPattern) { // NumberFormat('de', {notation: 'compact', compactDisplay: 'short'}).format(1234) //=> "1234" // NumberFormat('de').format(1234) //=> "1.234" if (useGrouping && (notation !== 'compact' || x >= 10000)) { - var groupSepSymbol = symbols.group; + // a. 
Let groupSepSymbol be the implementation-, locale-, and numbering system-dependent (ILND) String representing the grouping separator. + // For currency we should use `currencyGroup` instead of generic `group` + var groupSepSymbol = style === 'currency' && symbols.currencyGroup != null + ? symbols.currencyGroup + : symbols.group; var groups = []; // > There may be two different grouping sizes: The primary grouping size used for the least // > significant integer group, and the secondary grouping size used for more significant groups. @@ -336,7 +340,10 @@ decimalNumberPattern) { } // #endregion if (fraction !== undefined) { - result.push({ type: 'decimal', value: symbols.decimal }, { type: 'fraction', value: fraction }); + var decimalSepSymbol = style === 'currency' && symbols.currencyDecimal != null + ? symbols.currencyDecimal + : symbols.decimal; + result.push({ type: 'decimal', value: decimalSepSymbol }, { type: 'fraction', value: fraction }); } if ((notation === 'scientific' || notation === 'engineering') && isFinite(x)) { diff --git a/node_modules/@formatjs/ecma402-abstract/lib/index.js b/node_modules/@formatjs/ecma402-abstract/lib/index.js index 47f736b6d..82c000e5c 100644 --- a/node_modules/@formatjs/ecma402-abstract/lib/index.js +++ b/node_modules/@formatjs/ecma402-abstract/lib/index.js @@ -30,13 +30,13 @@ export * from './NumberFormat/ToRawPrecision'; export { default as _formatToParts } from './NumberFormat/format_to_parts'; export * from './PartitionPattern'; export * from './SupportedLocales'; -export { getInternalSlot, getMultiInternalSlots, isLiteralPart, setInternalSlot, setMultiInternalSlots, getMagnitude, defineProperty, } from './utils'; +export { createDataProperty, defineProperty, getInternalSlot, getMagnitude, getMultiInternalSlots, isLiteralPart, setInternalSlot, setMultiInternalSlots, } from './utils'; +export * from './262'; export { isMissingLocaleDataError } from './data'; -export * from './types/relative-time'; export * from 
'./types/date-time'; +export * from './types/displaynames'; export * from './types/list'; -export * from './types/plural-rules'; export * from './types/number'; -export * from './types/displaynames'; +export * from './types/plural-rules'; +export * from './types/relative-time'; export { invariant } from './utils'; -export * from './262'; diff --git a/node_modules/@formatjs/ecma402-abstract/lib/utils.js b/node_modules/@formatjs/ecma402-abstract/lib/utils.js index 8305797d3..3558b297b 100644 --- a/node_modules/@formatjs/ecma402-abstract/lib/utils.js +++ b/node_modules/@formatjs/ecma402-abstract/lib/utils.js @@ -69,6 +69,20 @@ export function defineProperty(target, name, _a) { value: value, }); } +/** + * 7.3.5 CreateDataProperty + * @param target + * @param name + * @param value + */ +export function createDataProperty(target, name, value) { + Object.defineProperty(target, name, { + configurable: true, + enumerable: true, + writable: true, + value: value, + }); +} export var UNICODE_EXTENSION_SEQUENCE_REGEX = /-u(?:-[0-9a-z]{2,8})+/gi; export function invariant(condition, message, Err) { if (Err === void 0) { Err = Error; } diff --git a/node_modules/@formatjs/ecma402-abstract/package.json b/node_modules/@formatjs/ecma402-abstract/package.json index 232ba8023..ac17e9cfb 100644 --- a/node_modules/@formatjs/ecma402-abstract/package.json +++ b/node_modules/@formatjs/ecma402-abstract/package.json @@ -1,6 +1,6 @@ { "name": "@formatjs/ecma402-abstract", - "version": "1.18.2", + "version": "2.0.0", "description": "A collection of implementation for ECMAScript abstract operations", "keywords": [ "intl", diff --git a/node_modules/@formatjs/ecma402-abstract/utils.js b/node_modules/@formatjs/ecma402-abstract/utils.js index 60ec4c12e..143469eb2 100644 --- a/node_modules/@formatjs/ecma402-abstract/utils.js +++ b/node_modules/@formatjs/ecma402-abstract/utils.js @@ -1,6 +1,6 @@ "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); -exports.invariant = 
exports.UNICODE_EXTENSION_SEQUENCE_REGEX = exports.defineProperty = exports.isLiteralPart = exports.getMultiInternalSlots = exports.getInternalSlot = exports.setMultiInternalSlots = exports.setInternalSlot = exports.repeat = exports.getMagnitude = void 0; +exports.invariant = exports.UNICODE_EXTENSION_SEQUENCE_REGEX = exports.createDataProperty = exports.defineProperty = exports.isLiteralPart = exports.getMultiInternalSlots = exports.getInternalSlot = exports.setMultiInternalSlots = exports.setInternalSlot = exports.repeat = exports.getMagnitude = void 0; /** * Cannot do Math.log(x) / Math.log(10) bc if IEEE floating point issue * @param x number @@ -80,6 +80,21 @@ function defineProperty(target, name, _a) { }); } exports.defineProperty = defineProperty; +/** + * 7.3.5 CreateDataProperty + * @param target + * @param name + * @param value + */ +function createDataProperty(target, name, value) { + Object.defineProperty(target, name, { + configurable: true, + enumerable: true, + writable: true, + value: value, + }); +} +exports.createDataProperty = createDataProperty; exports.UNICODE_EXTENSION_SEQUENCE_REGEX = /-u(?:-[0-9a-z]{2,8})+/gi; function invariant(condition, message, Err) { if (Err === void 0) { Err = Error; } diff --git a/node_modules/@formatjs/icu-messageformat-parser/package.json b/node_modules/@formatjs/icu-messageformat-parser/package.json index 2176ef018..306c9de0b 100644 --- a/node_modules/@formatjs/icu-messageformat-parser/package.json +++ b/node_modules/@formatjs/icu-messageformat-parser/package.json @@ -1,6 +1,6 @@ { "name": "@formatjs/icu-messageformat-parser", - "version": "2.7.6", + "version": "2.7.8", "main": "index.js", "module": "lib/index.js", "types": "index.d.ts", @@ -12,7 +12,7 @@ }, "dependencies": { "tslib": "^2.4.0", - "@formatjs/icu-skeleton-parser": "1.8.0", - "@formatjs/ecma402-abstract": "1.18.2" + "@formatjs/ecma402-abstract": "2.0.0", + "@formatjs/icu-skeleton-parser": "1.8.2" } } \ No newline at end of file diff --git 
a/node_modules/@formatjs/icu-skeleton-parser/package.json b/node_modules/@formatjs/icu-skeleton-parser/package.json index 6c2ceb64f..0e47f7846 100644 --- a/node_modules/@formatjs/icu-skeleton-parser/package.json +++ b/node_modules/@formatjs/icu-skeleton-parser/package.json @@ -1,6 +1,6 @@ { "name": "@formatjs/icu-skeleton-parser", - "version": "1.8.0", + "version": "1.8.2", "main": "index.js", "module": "lib/index.js", "types": "index.d.ts", @@ -12,6 +12,6 @@ }, "dependencies": { "tslib": "^2.4.0", - "@formatjs/ecma402-abstract": "1.18.2" + "@formatjs/ecma402-abstract": "2.0.0" } } \ No newline at end of file diff --git a/node_modules/@isaacs/cliui/LICENSE.txt b/node_modules/@isaacs/cliui/LICENSE.txt new file mode 100644 index 000000000..c7e27478a --- /dev/null +++ b/node_modules/@isaacs/cliui/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/@isaacs/cliui/build/index.cjs b/node_modules/@isaacs/cliui/build/index.cjs new file mode 100644 index 000000000..aca2b8507 --- /dev/null +++ b/node_modules/@isaacs/cliui/build/index.cjs @@ -0,0 +1,317 @@ +'use strict'; + +const align = { + right: alignRight, + center: alignCenter +}; +const top = 0; +const right = 1; +const bottom = 2; +const left = 3; +class UI { + constructor(opts) { + var _a; + this.width = opts.width; + /* c8 ignore start */ + this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true; + /* c8 ignore stop */ + this.rows = []; + } + span(...args) { + const cols = this.div(...args); + cols.span = true; + } + resetOutput() { + this.rows = []; + } + div(...args) { + if (args.length === 0) { + this.div(''); + } + if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') { + return this.applyLayoutDSL(args[0]); + } + const cols = args.map(arg => { + if (typeof arg === 'string') { + return this.colFromString(arg); + } + return arg; + }); + this.rows.push(cols); + return cols; + } + shouldApplyLayoutDSL(...args) { + return args.length === 1 && typeof args[0] === 'string' && + /[\t\n]/.test(args[0]); + } + applyLayoutDSL(str) { + const rows = str.split('\n').map(row => row.split('\t')); + let leftColumnWidth = 0; + // simple heuristic for layout, make sure the + // second column lines up along the left-hand. + // don't allow the first column to take up more + // than 50% of the screen. + rows.forEach(columns => { + if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) { + leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0])); + } + }); + // generate a table: + // replacing ' ' with padding calculations. + // using the algorithmically generated width. + rows.forEach(columns => { + this.div(...columns.map((r, i) => { + return { + text: r.trim(), + padding: this.measurePadding(r), + width: (i === 0 && columns.length > 1) ? 
leftColumnWidth : undefined + }; + })); + }); + return this.rows[this.rows.length - 1]; + } + colFromString(text) { + return { + text, + padding: this.measurePadding(text) + }; + } + measurePadding(str) { + // measure padding without ansi escape codes + const noAnsi = mixin.stripAnsi(str); + return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]; + } + toString() { + const lines = []; + this.rows.forEach(row => { + this.rowToString(row, lines); + }); + // don't display any lines with the + // hidden flag set. + return lines + .filter(line => !line.hidden) + .map(line => line.text) + .join('\n'); + } + rowToString(row, lines) { + this.rasterize(row).forEach((rrow, r) => { + let str = ''; + rrow.forEach((col, c) => { + const { width } = row[c]; // the width with padding. + const wrapWidth = this.negatePadding(row[c]); // the width without padding. + let ts = col; // temporary string used during alignment/padding. + if (wrapWidth > mixin.stringWidth(col)) { + ts += ' '.repeat(wrapWidth - mixin.stringWidth(col)); + } + // align the string within its column. + if (row[c].align && row[c].align !== 'left' && this.wrap) { + const fn = align[row[c].align]; + ts = fn(ts, wrapWidth); + if (mixin.stringWidth(ts) < wrapWidth) { + /* c8 ignore start */ + const w = width || 0; + /* c8 ignore stop */ + ts += ' '.repeat(w - mixin.stringWidth(ts) - 1); + } + } + // apply border and padding to string. + const padding = row[c].padding || [0, 0, 0, 0]; + if (padding[left]) { + str += ' '.repeat(padding[left]); + } + str += addBorder(row[c], ts, '| '); + str += ts; + str += addBorder(row[c], ts, ' |'); + if (padding[right]) { + str += ' '.repeat(padding[right]); + } + // if prior row is span, try to render the + // current row on the prior line. + if (r === 0 && lines.length > 0) { + str = this.renderInline(str, lines[lines.length - 1]); + } + }); + // remove trailing whitespace. 
+ lines.push({ + text: str.replace(/ +$/, ''), + span: row.span + }); + }); + return lines; + } + // if the full 'source' can render in + // the target line, do so. + renderInline(source, previousLine) { + const match = source.match(/^ */); + /* c8 ignore start */ + const leadingWhitespace = match ? match[0].length : 0; + /* c8 ignore stop */ + const target = previousLine.text; + const targetTextWidth = mixin.stringWidth(target.trimEnd()); + if (!previousLine.span) { + return source; + } + // if we're not applying wrapping logic, + // just always append to the span. + if (!this.wrap) { + previousLine.hidden = true; + return target + source; + } + if (leadingWhitespace < targetTextWidth) { + return source; + } + previousLine.hidden = true; + return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart(); + } + rasterize(row) { + const rrows = []; + const widths = this.columnWidths(row); + let wrapped; + // word wrap all columns, and create + // a data-structure that is easy to rasterize. + row.forEach((col, c) => { + // leave room for left and right padding. + col.width = widths[c]; + if (this.wrap) { + wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n'); + } + else { + wrapped = col.text.split('\n'); + } + if (col.border) { + wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.'); + wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'"); + } + // add top and bottom padding. 
+ if (col.padding) { + wrapped.unshift(...new Array(col.padding[top] || 0).fill('')); + wrapped.push(...new Array(col.padding[bottom] || 0).fill('')); + } + wrapped.forEach((str, r) => { + if (!rrows[r]) { + rrows.push([]); + } + const rrow = rrows[r]; + for (let i = 0; i < c; i++) { + if (rrow[i] === undefined) { + rrow.push(''); + } + } + rrow.push(str); + }); + }); + return rrows; + } + negatePadding(col) { + /* c8 ignore start */ + let wrapWidth = col.width || 0; + /* c8 ignore stop */ + if (col.padding) { + wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0); + } + if (col.border) { + wrapWidth -= 4; + } + return wrapWidth; + } + columnWidths(row) { + if (!this.wrap) { + return row.map(col => { + return col.width || mixin.stringWidth(col.text); + }); + } + let unset = row.length; + let remainingWidth = this.width; + // column widths can be set in config. + const widths = row.map(col => { + if (col.width) { + unset--; + remainingWidth -= col.width; + return col.width; + } + return undefined; + }); + // any unset widths should be calculated. + /* c8 ignore start */ + const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0; + /* c8 ignore stop */ + return widths.map((w, i) => { + if (w === undefined) { + return Math.max(unsetWidth, _minWidth(row[i])); + } + return w; + }); + } +} +function addBorder(col, ts, style) { + if (col.border) { + if (/[.']-+[.']/.test(ts)) { + return ''; + } + if (ts.trim().length !== 0) { + return style; + } + return ' '; + } + return ''; +} +// calculates the minimum width of +// a column, based on padding preferences. 
+function _minWidth(col) { + const padding = col.padding || []; + const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0); + if (col.border) { + return minWidth + 4; + } + return minWidth; +} +function getWindowWidth() { + /* c8 ignore start */ + if (typeof process === 'object' && process.stdout && process.stdout.columns) { + return process.stdout.columns; + } + return 80; +} +/* c8 ignore stop */ +function alignRight(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + if (strWidth < width) { + return ' '.repeat(width - strWidth) + str; + } + return str; +} +function alignCenter(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + /* c8 ignore start */ + if (strWidth >= width) { + return str; + } + /* c8 ignore stop */ + return ' '.repeat((width - strWidth) >> 1) + str; +} +let mixin; +function cliui(opts, _mixin) { + mixin = _mixin; + return new UI({ + /* c8 ignore start */ + width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(), + wrap: opts === null || opts === void 0 ? 
void 0 : opts.wrap + /* c8 ignore stop */ + }); +} + +// Bootstrap cliui with CommonJS dependencies: +const stringWidth = require('string-width-cjs'); +const stripAnsi = require('strip-ansi-cjs'); +const wrap = require('wrap-ansi-cjs'); +function ui(opts) { + return cliui(opts, { + stringWidth, + stripAnsi, + wrap + }); +} + +module.exports = ui; diff --git a/node_modules/@isaacs/cliui/build/index.d.cts b/node_modules/@isaacs/cliui/build/index.d.cts new file mode 100644 index 000000000..4567f945e --- /dev/null +++ b/node_modules/@isaacs/cliui/build/index.d.cts @@ -0,0 +1,43 @@ +interface UIOptions { + width: number; + wrap?: boolean; + rows?: string[]; +} +interface Column { + text: string; + width?: number; + align?: "right" | "left" | "center"; + padding: number[]; + border?: boolean; +} +interface ColumnArray extends Array { + span: boolean; +} +interface Line { + hidden?: boolean; + text: string; + span?: boolean; +} +declare class UI { + width: number; + wrap: boolean; + rows: ColumnArray[]; + constructor(opts: UIOptions); + span(...args: ColumnArray): void; + resetOutput(): void; + div(...args: (Column | string)[]): ColumnArray; + private shouldApplyLayoutDSL; + private applyLayoutDSL; + private colFromString; + private measurePadding; + toString(): string; + rowToString(row: ColumnArray, lines: Line[]): Line[]; + // if the full 'source' can render in + // the target line, do so. 
+ private renderInline; + private rasterize; + private negatePadding; + private columnWidths; +} +declare function ui(opts: UIOptions): UI; +export { ui as default }; diff --git a/node_modules/@isaacs/cliui/build/lib/index.js b/node_modules/@isaacs/cliui/build/lib/index.js new file mode 100644 index 000000000..587b5ecd3 --- /dev/null +++ b/node_modules/@isaacs/cliui/build/lib/index.js @@ -0,0 +1,302 @@ +'use strict'; +const align = { + right: alignRight, + center: alignCenter +}; +const top = 0; +const right = 1; +const bottom = 2; +const left = 3; +export class UI { + constructor(opts) { + var _a; + this.width = opts.width; + /* c8 ignore start */ + this.wrap = (_a = opts.wrap) !== null && _a !== void 0 ? _a : true; + /* c8 ignore stop */ + this.rows = []; + } + span(...args) { + const cols = this.div(...args); + cols.span = true; + } + resetOutput() { + this.rows = []; + } + div(...args) { + if (args.length === 0) { + this.div(''); + } + if (this.wrap && this.shouldApplyLayoutDSL(...args) && typeof args[0] === 'string') { + return this.applyLayoutDSL(args[0]); + } + const cols = args.map(arg => { + if (typeof arg === 'string') { + return this.colFromString(arg); + } + return arg; + }); + this.rows.push(cols); + return cols; + } + shouldApplyLayoutDSL(...args) { + return args.length === 1 && typeof args[0] === 'string' && + /[\t\n]/.test(args[0]); + } + applyLayoutDSL(str) { + const rows = str.split('\n').map(row => row.split('\t')); + let leftColumnWidth = 0; + // simple heuristic for layout, make sure the + // second column lines up along the left-hand. + // don't allow the first column to take up more + // than 50% of the screen. + rows.forEach(columns => { + if (columns.length > 1 && mixin.stringWidth(columns[0]) > leftColumnWidth) { + leftColumnWidth = Math.min(Math.floor(this.width * 0.5), mixin.stringWidth(columns[0])); + } + }); + // generate a table: + // replacing ' ' with padding calculations. + // using the algorithmically generated width. 
+ rows.forEach(columns => { + this.div(...columns.map((r, i) => { + return { + text: r.trim(), + padding: this.measurePadding(r), + width: (i === 0 && columns.length > 1) ? leftColumnWidth : undefined + }; + })); + }); + return this.rows[this.rows.length - 1]; + } + colFromString(text) { + return { + text, + padding: this.measurePadding(text) + }; + } + measurePadding(str) { + // measure padding without ansi escape codes + const noAnsi = mixin.stripAnsi(str); + return [0, noAnsi.match(/\s*$/)[0].length, 0, noAnsi.match(/^\s*/)[0].length]; + } + toString() { + const lines = []; + this.rows.forEach(row => { + this.rowToString(row, lines); + }); + // don't display any lines with the + // hidden flag set. + return lines + .filter(line => !line.hidden) + .map(line => line.text) + .join('\n'); + } + rowToString(row, lines) { + this.rasterize(row).forEach((rrow, r) => { + let str = ''; + rrow.forEach((col, c) => { + const { width } = row[c]; // the width with padding. + const wrapWidth = this.negatePadding(row[c]); // the width without padding. + let ts = col; // temporary string used during alignment/padding. + if (wrapWidth > mixin.stringWidth(col)) { + ts += ' '.repeat(wrapWidth - mixin.stringWidth(col)); + } + // align the string within its column. + if (row[c].align && row[c].align !== 'left' && this.wrap) { + const fn = align[row[c].align]; + ts = fn(ts, wrapWidth); + if (mixin.stringWidth(ts) < wrapWidth) { + /* c8 ignore start */ + const w = width || 0; + /* c8 ignore stop */ + ts += ' '.repeat(w - mixin.stringWidth(ts) - 1); + } + } + // apply border and padding to string. + const padding = row[c].padding || [0, 0, 0, 0]; + if (padding[left]) { + str += ' '.repeat(padding[left]); + } + str += addBorder(row[c], ts, '| '); + str += ts; + str += addBorder(row[c], ts, ' |'); + if (padding[right]) { + str += ' '.repeat(padding[right]); + } + // if prior row is span, try to render the + // current row on the prior line. 
+ if (r === 0 && lines.length > 0) { + str = this.renderInline(str, lines[lines.length - 1]); + } + }); + // remove trailing whitespace. + lines.push({ + text: str.replace(/ +$/, ''), + span: row.span + }); + }); + return lines; + } + // if the full 'source' can render in + // the target line, do so. + renderInline(source, previousLine) { + const match = source.match(/^ */); + /* c8 ignore start */ + const leadingWhitespace = match ? match[0].length : 0; + /* c8 ignore stop */ + const target = previousLine.text; + const targetTextWidth = mixin.stringWidth(target.trimEnd()); + if (!previousLine.span) { + return source; + } + // if we're not applying wrapping logic, + // just always append to the span. + if (!this.wrap) { + previousLine.hidden = true; + return target + source; + } + if (leadingWhitespace < targetTextWidth) { + return source; + } + previousLine.hidden = true; + return target.trimEnd() + ' '.repeat(leadingWhitespace - targetTextWidth) + source.trimStart(); + } + rasterize(row) { + const rrows = []; + const widths = this.columnWidths(row); + let wrapped; + // word wrap all columns, and create + // a data-structure that is easy to rasterize. + row.forEach((col, c) => { + // leave room for left and right padding. + col.width = widths[c]; + if (this.wrap) { + wrapped = mixin.wrap(col.text, this.negatePadding(col), { hard: true }).split('\n'); + } + else { + wrapped = col.text.split('\n'); + } + if (col.border) { + wrapped.unshift('.' + '-'.repeat(this.negatePadding(col) + 2) + '.'); + wrapped.push("'" + '-'.repeat(this.negatePadding(col) + 2) + "'"); + } + // add top and bottom padding. 
+ if (col.padding) { + wrapped.unshift(...new Array(col.padding[top] || 0).fill('')); + wrapped.push(...new Array(col.padding[bottom] || 0).fill('')); + } + wrapped.forEach((str, r) => { + if (!rrows[r]) { + rrows.push([]); + } + const rrow = rrows[r]; + for (let i = 0; i < c; i++) { + if (rrow[i] === undefined) { + rrow.push(''); + } + } + rrow.push(str); + }); + }); + return rrows; + } + negatePadding(col) { + /* c8 ignore start */ + let wrapWidth = col.width || 0; + /* c8 ignore stop */ + if (col.padding) { + wrapWidth -= (col.padding[left] || 0) + (col.padding[right] || 0); + } + if (col.border) { + wrapWidth -= 4; + } + return wrapWidth; + } + columnWidths(row) { + if (!this.wrap) { + return row.map(col => { + return col.width || mixin.stringWidth(col.text); + }); + } + let unset = row.length; + let remainingWidth = this.width; + // column widths can be set in config. + const widths = row.map(col => { + if (col.width) { + unset--; + remainingWidth -= col.width; + return col.width; + } + return undefined; + }); + // any unset widths should be calculated. + /* c8 ignore start */ + const unsetWidth = unset ? Math.floor(remainingWidth / unset) : 0; + /* c8 ignore stop */ + return widths.map((w, i) => { + if (w === undefined) { + return Math.max(unsetWidth, _minWidth(row[i])); + } + return w; + }); + } +} +function addBorder(col, ts, style) { + if (col.border) { + if (/[.']-+[.']/.test(ts)) { + return ''; + } + if (ts.trim().length !== 0) { + return style; + } + return ' '; + } + return ''; +} +// calculates the minimum width of +// a column, based on padding preferences. 
+function _minWidth(col) { + const padding = col.padding || []; + const minWidth = 1 + (padding[left] || 0) + (padding[right] || 0); + if (col.border) { + return minWidth + 4; + } + return minWidth; +} +function getWindowWidth() { + /* c8 ignore start */ + if (typeof process === 'object' && process.stdout && process.stdout.columns) { + return process.stdout.columns; + } + return 80; +} +/* c8 ignore stop */ +function alignRight(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + if (strWidth < width) { + return ' '.repeat(width - strWidth) + str; + } + return str; +} +function alignCenter(str, width) { + str = str.trim(); + const strWidth = mixin.stringWidth(str); + /* c8 ignore start */ + if (strWidth >= width) { + return str; + } + /* c8 ignore stop */ + return ' '.repeat((width - strWidth) >> 1) + str; +} +let mixin; +export function cliui(opts, _mixin) { + mixin = _mixin; + return new UI({ + /* c8 ignore start */ + width: (opts === null || opts === void 0 ? void 0 : opts.width) || getWindowWidth(), + wrap: opts === null || opts === void 0 ? 
void 0 : opts.wrap + /* c8 ignore stop */ + }); +} diff --git a/node_modules/@isaacs/cliui/index.mjs b/node_modules/@isaacs/cliui/index.mjs new file mode 100644 index 000000000..5177519af --- /dev/null +++ b/node_modules/@isaacs/cliui/index.mjs @@ -0,0 +1,14 @@ +// Bootstrap cliui with ESM dependencies: +import { cliui } from './build/lib/index.js' + +import stringWidth from 'string-width' +import stripAnsi from 'strip-ansi' +import wrap from 'wrap-ansi' + +export default function ui (opts) { + return cliui(opts, { + stringWidth, + stripAnsi, + wrap + }) +} diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js new file mode 100644 index 000000000..130a0929b --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js @@ -0,0 +1,8 @@ +export default function ansiRegex({onlyFirst = false} = {}) { + const pattern = [ + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' + ].join('|'); + + return new RegExp(pattern, onlyFirst ? 
undefined : 'g'); +} diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json new file mode 100644 index 000000000..7bbb563bf --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json @@ -0,0 +1,58 @@ +{ + "name": "ansi-regex", + "version": "6.0.1", + "description": "Regular expression for matching ANSI escape codes", + "license": "MIT", + "repository": "chalk/ansi-regex", + "funding": "https://github.com/chalk/ansi-regex?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd", + "view-supported": "node fixtures/view-codes.js" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.14.0", + "xo": "^0.38.2" + } +} diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-styles/index.js b/node_modules/@isaacs/cliui/node_modules/ansi-styles/index.js new file mode 100644 index 000000000..d7bede44b --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/ansi-styles/index.js @@ -0,0 +1,223 @@ +const ANSI_BACKGROUND_OFFSET = 10; + +const wrapAnsi16 = (offset = 0) => code => `\u001B[${code + offset}m`; + +const wrapAnsi256 = (offset = 0) => code => `\u001B[${38 + offset};5;${code}m`; + +const wrapAnsi16m = (offset = 0) => (red, green, blue) => `\u001B[${38 + offset};2;${red};${green};${blue}m`; + +const styles = { + modifier: { + reset: [0, 0], + // 21 isn't widely supported and 22 does the same thing + bold: 
[1, 22], + dim: [2, 22], + italic: [3, 23], + underline: [4, 24], + overline: [53, 55], + inverse: [7, 27], + hidden: [8, 28], + strikethrough: [9, 29], + }, + color: { + black: [30, 39], + red: [31, 39], + green: [32, 39], + yellow: [33, 39], + blue: [34, 39], + magenta: [35, 39], + cyan: [36, 39], + white: [37, 39], + + // Bright color + blackBright: [90, 39], + gray: [90, 39], // Alias of `blackBright` + grey: [90, 39], // Alias of `blackBright` + redBright: [91, 39], + greenBright: [92, 39], + yellowBright: [93, 39], + blueBright: [94, 39], + magentaBright: [95, 39], + cyanBright: [96, 39], + whiteBright: [97, 39], + }, + bgColor: { + bgBlack: [40, 49], + bgRed: [41, 49], + bgGreen: [42, 49], + bgYellow: [43, 49], + bgBlue: [44, 49], + bgMagenta: [45, 49], + bgCyan: [46, 49], + bgWhite: [47, 49], + + // Bright color + bgBlackBright: [100, 49], + bgGray: [100, 49], // Alias of `bgBlackBright` + bgGrey: [100, 49], // Alias of `bgBlackBright` + bgRedBright: [101, 49], + bgGreenBright: [102, 49], + bgYellowBright: [103, 49], + bgBlueBright: [104, 49], + bgMagentaBright: [105, 49], + bgCyanBright: [106, 49], + bgWhiteBright: [107, 49], + }, +}; + +export const modifierNames = Object.keys(styles.modifier); +export const foregroundColorNames = Object.keys(styles.color); +export const backgroundColorNames = Object.keys(styles.bgColor); +export const colorNames = [...foregroundColorNames, ...backgroundColorNames]; + +function assembleStyles() { + const codes = new Map(); + + for (const [groupName, group] of Object.entries(styles)) { + for (const [styleName, style] of Object.entries(group)) { + styles[styleName] = { + open: `\u001B[${style[0]}m`, + close: `\u001B[${style[1]}m`, + }; + + group[styleName] = styles[styleName]; + + codes.set(style[0], style[1]); + } + + Object.defineProperty(styles, groupName, { + value: group, + enumerable: false, + }); + } + + Object.defineProperty(styles, 'codes', { + value: codes, + enumerable: false, + }); + + styles.color.close = 
'\u001B[39m'; + styles.bgColor.close = '\u001B[49m'; + + styles.color.ansi = wrapAnsi16(); + styles.color.ansi256 = wrapAnsi256(); + styles.color.ansi16m = wrapAnsi16m(); + styles.bgColor.ansi = wrapAnsi16(ANSI_BACKGROUND_OFFSET); + styles.bgColor.ansi256 = wrapAnsi256(ANSI_BACKGROUND_OFFSET); + styles.bgColor.ansi16m = wrapAnsi16m(ANSI_BACKGROUND_OFFSET); + + // From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js + Object.defineProperties(styles, { + rgbToAnsi256: { + value: (red, green, blue) => { + // We use the extended greyscale palette here, with the exception of + // black and white. normal palette only has 4 greyscale shades. + if (red === green && green === blue) { + if (red < 8) { + return 16; + } + + if (red > 248) { + return 231; + } + + return Math.round(((red - 8) / 247) * 24) + 232; + } + + return 16 + + (36 * Math.round(red / 255 * 5)) + + (6 * Math.round(green / 255 * 5)) + + Math.round(blue / 255 * 5); + }, + enumerable: false, + }, + hexToRgb: { + value: hex => { + const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16)); + if (!matches) { + return [0, 0, 0]; + } + + let [colorString] = matches; + + if (colorString.length === 3) { + colorString = [...colorString].map(character => character + character).join(''); + } + + const integer = Number.parseInt(colorString, 16); + + return [ + /* eslint-disable no-bitwise */ + (integer >> 16) & 0xFF, + (integer >> 8) & 0xFF, + integer & 0xFF, + /* eslint-enable no-bitwise */ + ]; + }, + enumerable: false, + }, + hexToAnsi256: { + value: hex => styles.rgbToAnsi256(...styles.hexToRgb(hex)), + enumerable: false, + }, + ansi256ToAnsi: { + value: code => { + if (code < 8) { + return 30 + code; + } + + if (code < 16) { + return 90 + (code - 8); + } + + let red; + let green; + let blue; + + if (code >= 232) { + red = (((code - 232) * 10) + 8) / 255; + green = red; + blue = red; + } else { + code -= 16; + + const remainder = code % 36; + + red = 
Math.floor(code / 36) / 5; + green = Math.floor(remainder / 6) / 5; + blue = (remainder % 6) / 5; + } + + const value = Math.max(red, green, blue) * 2; + + if (value === 0) { + return 30; + } + + // eslint-disable-next-line no-bitwise + let result = 30 + ((Math.round(blue) << 2) | (Math.round(green) << 1) | Math.round(red)); + + if (value === 2) { + result += 60; + } + + return result; + }, + enumerable: false, + }, + rgbToAnsi: { + value: (red, green, blue) => styles.ansi256ToAnsi(styles.rgbToAnsi256(red, green, blue)), + enumerable: false, + }, + hexToAnsi: { + value: hex => styles.ansi256ToAnsi(styles.hexToAnsi256(hex)), + enumerable: false, + }, + }); + + return styles; +} + +const ansiStyles = assembleStyles(); + +export default ansiStyles; diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-styles/package.json b/node_modules/@isaacs/cliui/node_modules/ansi-styles/package.json new file mode 100644 index 000000000..6cd3ca5bf --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/ansi-styles/package.json @@ -0,0 +1,54 @@ +{ + "name": "ansi-styles", + "version": "6.2.1", + "description": "ANSI escape codes for styling strings in the terminal", + "license": "MIT", + "repository": "chalk/ansi-styles", + "funding": "https://github.com/chalk/ansi-styles?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd", + "screenshot": "svg-term --command='node screenshot' --out=screenshot.svg --padding=3 --width=55 --height=3 --at=1000 --no-cursor" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "devDependencies": 
{ + "ava": "^3.15.0", + "svg-term-cli": "^2.1.1", + "tsd": "^0.19.0", + "xo": "^0.47.0" + } +} diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt b/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt new file mode 100644 index 000000000..a41e0a7ef --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js new file mode 100644 index 000000000..3fbe92410 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/RGI_Emoji.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD8
3C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\
uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73
\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u2
6F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF
2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD
26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]/g; +}; diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js new file mode 100644 index 000000000..ecf32f177 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/RGI_Emoji.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return 
/\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F6
80}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3E
B}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{
1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1F396}\u{1
F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{
1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F9
1F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]/gu; +}; diff --git 
a/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js new file mode 100644 index 000000000..1a4fc8d0d --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u
200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8
}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]
|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u
2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1
E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u
270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\
u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u27
47\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js new file mode 100644 index 000000000..8e9f98575 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/es2015/text.js @@ -0,0 +1,6 
@@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return /\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0077}\u{E006C}\u{E0073}|\u{E0073}\u{E0063}\u{E0074}|\u{E0065}\u{E006E}\u{E0067})\u{E007F}|(?:\u{1F9D1}\u{1F3FF}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FE}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FD}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|(?:\u{1F9D1}\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F9D1}|\u{1F469}\u{1F3FB}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FC}-\u{1F3FF}]|\u{1F468}(?:\u{1F3FB}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]))?|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FF}]|\u{1F468}[\u{1F3FB}-\u{1F3FF}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F
393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u{1F466}\u{1F467}])|\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC})?|(?:\u{1F469}(?:\u{1F3FB}\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F3FC}-\u{1F3FF}]\u200D\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}]))|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]\u200D\u{1F91D}\u200D\u{1F9D1})[\u{1F3FB}-\u{1F3FF}]|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F469}(?:\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9
BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F9D1}(?:\u200D(?:\u{1F91D}\u200D\u{1F9D1}|[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FE}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FD}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FC}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F37C}\u{1F384}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|\u{1F4
69}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|(?:\u{1F441}\uFE0F\u200D\u{1F5E8}|\u{1F9D1}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\u{1F636}\u200D\u{1F32B}|\u{1F3F3}\uFE0F\u200D\u26A7|\u{1F43B}\u200D\u2744|(?:[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u{1F3F4}\u200D\u2620|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299\u{1F170}\u{1F171}\u{1F17E}\u{1
F17F}\u{1F202}\u{1F237}\u{1F321}\u{1F324}-\u{1F32C}\u{1F336}\u{1F37D}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}\u{1F39F}\u{1F3CD}\u{1F3CE}\u{1F3D4}-\u{1F3DF}\u{1F3F5}\u{1F3F7}\u{1F43F}\u{1F4FD}\u{1F549}\u{1F54A}\u{1F56F}\u{1F570}\u{1F573}\u{1F576}-\u{1F579}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}\u{1F6CB}\u{1F6CD}-\u{1F6CF}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6F0}\u{1F6F3}])\uFE0F|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F469}\u200D\u{1F467}|\u{1F469}\u200D\u{1F466}|\u{1F635}\u200D\u{1F4AB}|\u{1F62E}\u200D\u{1F4A8}|\u{1F415}\u200D\u{1F9BA}|\u{1F9D1}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F469}(?:\u{1F3FF}|\u{1F3FE}|\u{1F3FD}|\u{1F3FC}|\u{1F3FB})?|\u{1F1FD}\u{1F1F0}|\u{1F1F6}\u{1F1E6}|\u{1F1F4}\u{1F1F2}|\u{1F408}\u200D\u2B1B|\u2764\uFE0F\u200D[\u{1F525}\u{1FA79}]|\u{1F441}\uFE0F|\u{1F3F3}\uFE0F|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|\u{1F1EE}[\u{1F1E8
}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\uFE0F\u{1F3FB}-\u{1F3FF}]|\u{1F3F4}|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270C\u270D\u{1F574}\u{1F590}][\uFE0F\u{1F3FB}-\u{1F3FF}]|[\u270A\u270B\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F408}\u{1F415}\u{1F43B}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F57A}\u{1F595}\u{1F596}\u{1F62E}\u{1F635}\u{1F636}\u{1F6
4C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90C}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F934}\u{1F936}\u{1F977}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}\u{1F9D3}\u{1F9D5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F470}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F935}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D4}\u{1F9D6}-\u{1F9DD}]|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F384}\u{1F386}-\u{1F393}\u{1F3A0}-\u{1F3C1}\u{1F3C5}\u{1F3C6}\u{1F3C8}\u{1F3C9}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F8}-\u{1F407}\u{1F409}-\u{1F414}\u{1F416}-\u{1F43A}\u{1F43C}-\u{1F43E}\u{1F440}\u{1F444}\u{1F445}\u{1F451}-\u{1F465}\u{1F46A}\u{1F479}-\u{1F47B}\u{1F47D}-\u{1F480}\u{1F484}\u{1F488}-\u{1F48E}\u{1F490}\u{1F492}-\u{1F4A9}\u{1F4AB}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F5A4}\u{1F5FB}-\u{1F62D}\u{1F62F}-\u{1F634}\u{1F637}-\u{1F644}\u{1F648}-\u{1F64A}\u{1F680}-\u{1F6A2}\u{1F6A4}-\u{1F6B3}\u{1F6B7}-\u{1F6BF}\u{1F6C1}-\u{1F6C5}\u{1F6D0}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90D}\u{1F90E}\u{1F910}-\u{1F917}\u{1F91D}\u{1F920}-\u{1F925}\u{1F927}-\u{1F92F}\u{1F93A}\u{1F93F}-\u{1F945}\u{1F947}-\u{1F976}\u{1F978}\u{1F97A}-\u{1F9B4}\u{1F9B7}\u{1F9BA}\u{1F9BC}-\u{1F9CB}\u{1F9D0}\u{1F9E0}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC
2}\u{1FAD0}-\u{1FAD6}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}-\u{1F6D7}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FC}\u{1F7E0}-\u{1F7EB}\u{1F90C}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F978}\u{1F97A}-\u{1F9CB}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA74}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA86}\u{1FA90}-\u{1FAA8}\u{1FAB0}-\u{1FAB6}\u{1FAC0}-\u{1FAC2}\u{1FAD0}-\u{1FAD6}]\uFE0F?/gu; +}; diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js new file mode 100644 index 000000000..c0490d4c9 --- 
/dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return /\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC
66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF
7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uD
C69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\u
DF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3
\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\
u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u
21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5-\uDED7\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDD77\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; +}; diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json b/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json new file mode 100644 
index 000000000..eac892a16 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/package.json @@ -0,0 +1,52 @@ +{ + "name": "emoji-regex", + "version": "9.2.2", + "description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.", + "homepage": "https://mths.be/emoji-regex", + "main": "index.js", + "types": "index.d.ts", + "keywords": [ + "unicode", + "regex", + "regexp", + "regular expressions", + "code points", + "symbols", + "characters", + "emoji" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/emoji-regex.git" + }, + "bugs": "https://github.com/mathiasbynens/emoji-regex/issues", + "files": [ + "LICENSE-MIT.txt", + "index.js", + "index.d.ts", + "RGI_Emoji.js", + "RGI_Emoji.d.ts", + "text.js", + "text.d.ts", + "es2015" + ], + "scripts": { + "build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src es2015_types -D -d ./es2015; node script/inject-sequences.js", + "test": "mocha", + "test:watch": "npm run test -- --watch" + }, + "devDependencies": { + "@babel/cli": "^7.4.4", + "@babel/core": "^7.4.4", + "@babel/plugin-proposal-unicode-property-regex": "^7.4.4", + "@babel/preset-env": "^7.4.4", + "@unicode/unicode-13.0.0": "^1.0.3", + "mocha": "^6.1.4", + "regexgen": "^1.3.0" + } +} diff --git a/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js b/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js new file mode 100644 index 000000000..9bc63ce74 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/emoji-regex/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67)\uDB40\uDC7F|(?:\uD83E\uDDD1\uD83C\uDFFF\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFC-\uDFFF])|\uD83D\uDC68(?:\uD83C\uDFFB(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|[\u2695\u2696\u2708]\uFE0F|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))?|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFF]))|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDA
F-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])\uFE0F|\u200D(?:(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D[\uDC66\uDC67])|\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC)?|(?:\uD83D\uDC69(?:\uD83C\uDFFB\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|(?:\uD83C[\uDFFC-\uDFFF])\u200D\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69]))|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC69(?:\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8
\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83E\uDDD1(?:\u200D(?:\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83E\uDDD1(?:\uD83C\uDFFF\u200D[\u2695\u2
696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|\uD83D\uDE36\u200D\uD83C\uDF2B|\uD83C\uDFF3\uFE0F\u200D\u26A7|\uD83D\uDC3B\u200D\u2744|(?:(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\uD83C\uDFF4\u200D\u2620|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])\u200D[\u2640\u2642]|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u2600-\u2604\u260E\u2611\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26B0\u26B1\u26C8\u26CF\u26D1\u26D3\u26E9\u26F0\u26F1\u26F4\u26F7\u26F8\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u3030\u303D\u3297\u3299]|\uD83C[\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]|\uD83D[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8
A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3])\uFE0F|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDE35\u200D\uD83D\uDCAB|\uD83D\uDE2E\u200D\uD83D\uDCA8|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83E\uDDD1(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83D\uDC69(?:\uD83C\uDFFF|\uD83C\uDFFE|\uD83C\uDFFD|\uD83C\uDFFC|\uD83C\uDFFB)?|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF6\uD83C\uDDE6|\uD83C\uDDF4\uD83C\uDDF2|\uD83D\uDC08\u200D\u2B1B|\u2764\uFE0F\u200D(?:\uD83D\uDD25|\uD83E\uDE79)|\uD83D\uDC41\uFE0F|\uD83C\uDFF3\uFE0F|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDD
EA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|[#\*0-9]\uFE0F\u20E3|\u2764\uFE0F|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|\uD83C\uDFF4|(?:[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270C\u270D]|\uD83D[\uDD74\uDD90])(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])|[\u270A\u270B]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC08\uDC15\uDC3B\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE2E\uDE35\uDE36\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0C\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5]|\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD4\uDDD6-\uDDDD]|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2
B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0D\uDD0E\uDD10-\uDD17\uDD1D\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78\uDD7A-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCB\uDDD0\uDDE0-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6]|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26A7\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uD
DA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5-\uDED7\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFC\uDFE0-\uDFEB]|\uD83E[\uDD0C-\uDD3A\uDD3C-\uDD45\uDD47-\uDD78\uDD7A-\uDDCB\uDDCD-\uDDFF\uDE70-\uDE74\uDE78-\uDE7A\uDE80-\uDE86\uDE90-\uDEA8\uDEB0-\uDEB6\uDEC0-\uDEC2\uDED0-\uDED6])\uFE0F?/g; +}; diff --git a/node_modules/@isaacs/cliui/node_modules/string-width/index.js b/node_modules/@isaacs/cliui/node_modules/string-width/index.js new file mode 100644 index 000000000..9294488f8 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/string-width/index.js @@ -0,0 +1,54 @@ +import stripAnsi from 'strip-ansi'; +import eastAsianWidth from 'eastasianwidth'; +import emojiRegex from 'emoji-regex'; + +export default function stringWidth(string, options = {}) { + if (typeof string !== 'string' || string.length === 0) { + return 0; + } + + options = { + ambiguousIsNarrow: true, + ...options + }; + + string = stripAnsi(string); + + if (string.length === 0) { + return 0; + } + + string = string.replace(emojiRegex(), ' '); + + const ambiguousCharacterWidth = options.ambiguousIsNarrow ? 
1 : 2; + let width = 0; + + for (const character of string) { + const codePoint = character.codePointAt(0); + + // Ignore control characters + if (codePoint <= 0x1F || (codePoint >= 0x7F && codePoint <= 0x9F)) { + continue; + } + + // Ignore combining characters + if (codePoint >= 0x300 && codePoint <= 0x36F) { + continue; + } + + const code = eastAsianWidth.eastAsianWidth(character); + switch (code) { + case 'F': + case 'W': + width += 2; + break; + case 'A': + width += ambiguousCharacterWidth; + break; + default: + width += 1; + } + } + + return width; +} diff --git a/node_modules/@isaacs/cliui/node_modules/string-width/package.json b/node_modules/@isaacs/cliui/node_modules/string-width/package.json new file mode 100644 index 000000000..f46d6770f --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/string-width/package.json @@ -0,0 +1,59 @@ +{ + "name": "string-width", + "version": "5.1.2", + "description": "Get the visual width of a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/string-width", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "string", + "character", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.14.0", + "xo": "^0.38.2" + } +} diff --git 
a/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js b/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js new file mode 100644 index 000000000..ba19750e6 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/index.js @@ -0,0 +1,14 @@ +import ansiRegex from 'ansi-regex'; + +const regex = ansiRegex(); + +export default function stripAnsi(string) { + if (typeof string !== 'string') { + throw new TypeError(`Expected a \`string\`, got \`${typeof string}\``); + } + + // Even though the regex is global, we don't need to reset the `.lastIndex` + // because unlike `.exec()` and `.test()`, `.replace()` does it automatically + // and doing it manually has a performance penalty. + return string.replace(regex, ''); +} diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json new file mode 100644 index 000000000..e1f455c32 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json @@ -0,0 +1,57 @@ +{ + "name": "strip-ansi", + "version": "7.1.0", + "description": "Strip ANSI escape codes from a string", + "license": "MIT", + "repository": "chalk/strip-ansi", + "funding": "https://github.com/chalk/strip-ansi?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": "./index.js", + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "devDependencies": { + "ava": "^3.15.0", + "tsd": "^0.17.0", + "xo": "^0.44.0" + } +} diff --git 
a/node_modules/@isaacs/cliui/node_modules/wrap-ansi/index.js b/node_modules/@isaacs/cliui/node_modules/wrap-ansi/index.js new file mode 100755 index 000000000..d80c74c19 --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/wrap-ansi/index.js @@ -0,0 +1,214 @@ +import stringWidth from 'string-width'; +import stripAnsi from 'strip-ansi'; +import ansiStyles from 'ansi-styles'; + +const ESCAPES = new Set([ + '\u001B', + '\u009B', +]); + +const END_CODE = 39; +const ANSI_ESCAPE_BELL = '\u0007'; +const ANSI_CSI = '['; +const ANSI_OSC = ']'; +const ANSI_SGR_TERMINATOR = 'm'; +const ANSI_ESCAPE_LINK = `${ANSI_OSC}8;;`; + +const wrapAnsiCode = code => `${ESCAPES.values().next().value}${ANSI_CSI}${code}${ANSI_SGR_TERMINATOR}`; +const wrapAnsiHyperlink = uri => `${ESCAPES.values().next().value}${ANSI_ESCAPE_LINK}${uri}${ANSI_ESCAPE_BELL}`; + +// Calculate the length of words split on ' ', ignoring +// the extra characters added by ansi escape codes +const wordLengths = string => string.split(' ').map(character => stringWidth(character)); + +// Wrap a long word across multiple rows +// Ansi escape codes do not count towards length +const wrapWord = (rows, word, columns) => { + const characters = [...word]; + + let isInsideEscape = false; + let isInsideLinkEscape = false; + let visible = stringWidth(stripAnsi(rows[rows.length - 1])); + + for (const [index, character] of characters.entries()) { + const characterLength = stringWidth(character); + + if (visible + characterLength <= columns) { + rows[rows.length - 1] += character; + } else { + rows.push(character); + visible = 0; + } + + if (ESCAPES.has(character)) { + isInsideEscape = true; + isInsideLinkEscape = characters.slice(index + 1).join('').startsWith(ANSI_ESCAPE_LINK); + } + + if (isInsideEscape) { + if (isInsideLinkEscape) { + if (character === ANSI_ESCAPE_BELL) { + isInsideEscape = false; + isInsideLinkEscape = false; + } + } else if (character === ANSI_SGR_TERMINATOR) { + isInsideEscape = false; + } + + 
continue; + } + + visible += characterLength; + + if (visible === columns && index < characters.length - 1) { + rows.push(''); + visible = 0; + } + } + + // It's possible that the last row we copy over is only + // ansi escape characters, handle this edge-case + if (!visible && rows[rows.length - 1].length > 0 && rows.length > 1) { + rows[rows.length - 2] += rows.pop(); + } +}; + +// Trims spaces from a string ignoring invisible sequences +const stringVisibleTrimSpacesRight = string => { + const words = string.split(' '); + let last = words.length; + + while (last > 0) { + if (stringWidth(words[last - 1]) > 0) { + break; + } + + last--; + } + + if (last === words.length) { + return string; + } + + return words.slice(0, last).join(' ') + words.slice(last).join(''); +}; + +// The wrap-ansi module can be invoked in either 'hard' or 'soft' wrap mode +// +// 'hard' will never allow a string to take up more than columns characters +// +// 'soft' allows long words to expand past the column length +const exec = (string, columns, options = {}) => { + if (options.trim !== false && string.trim() === '') { + return ''; + } + + let returnValue = ''; + let escapeCode; + let escapeUrl; + + const lengths = wordLengths(string); + let rows = ['']; + + for (const [index, word] of string.split(' ').entries()) { + if (options.trim !== false) { + rows[rows.length - 1] = rows[rows.length - 1].trimStart(); + } + + let rowLength = stringWidth(rows[rows.length - 1]); + + if (index !== 0) { + if (rowLength >= columns && (options.wordWrap === false || options.trim === false)) { + // If we start with a new word but the current row length equals the length of the columns, add a new row + rows.push(''); + rowLength = 0; + } + + if (rowLength > 0 || options.trim === false) { + rows[rows.length - 1] += ' '; + rowLength++; + } + } + + // In 'hard' wrap mode, the length of a line is never allowed to extend past 'columns' + if (options.hard && lengths[index] > columns) { + const remainingColumns = 
(columns - rowLength); + const breaksStartingThisLine = 1 + Math.floor((lengths[index] - remainingColumns - 1) / columns); + const breaksStartingNextLine = Math.floor((lengths[index] - 1) / columns); + if (breaksStartingNextLine < breaksStartingThisLine) { + rows.push(''); + } + + wrapWord(rows, word, columns); + continue; + } + + if (rowLength + lengths[index] > columns && rowLength > 0 && lengths[index] > 0) { + if (options.wordWrap === false && rowLength < columns) { + wrapWord(rows, word, columns); + continue; + } + + rows.push(''); + } + + if (rowLength + lengths[index] > columns && options.wordWrap === false) { + wrapWord(rows, word, columns); + continue; + } + + rows[rows.length - 1] += word; + } + + if (options.trim !== false) { + rows = rows.map(row => stringVisibleTrimSpacesRight(row)); + } + + const pre = [...rows.join('\n')]; + + for (const [index, character] of pre.entries()) { + returnValue += character; + + if (ESCAPES.has(character)) { + const {groups} = new RegExp(`(?:\\${ANSI_CSI}(?\\d+)m|\\${ANSI_ESCAPE_LINK}(?.*)${ANSI_ESCAPE_BELL})`).exec(pre.slice(index).join('')) || {groups: {}}; + if (groups.code !== undefined) { + const code = Number.parseFloat(groups.code); + escapeCode = code === END_CODE ? undefined : code; + } else if (groups.uri !== undefined) { + escapeUrl = groups.uri.length === 0 ? 
undefined : groups.uri; + } + } + + const code = ansiStyles.codes.get(Number(escapeCode)); + + if (pre[index + 1] === '\n') { + if (escapeUrl) { + returnValue += wrapAnsiHyperlink(''); + } + + if (escapeCode && code) { + returnValue += wrapAnsiCode(code); + } + } else if (character === '\n') { + if (escapeCode && code) { + returnValue += wrapAnsiCode(escapeCode); + } + + if (escapeUrl) { + returnValue += wrapAnsiHyperlink(escapeUrl); + } + } + } + + return returnValue; +}; + +// For each newline, invoke the method separately +export default function wrapAnsi(string, columns, options) { + return String(string) + .normalize() + .replace(/\r\n/g, '\n') + .split('\n') + .map(line => exec(line, columns, options)) + .join('\n'); +} diff --git a/node_modules/@isaacs/cliui/node_modules/wrap-ansi/package.json b/node_modules/@isaacs/cliui/node_modules/wrap-ansi/package.json new file mode 100644 index 000000000..198a5dbcb --- /dev/null +++ b/node_modules/@isaacs/cliui/node_modules/wrap-ansi/package.json @@ -0,0 +1,69 @@ +{ + "name": "wrap-ansi", + "version": "8.1.0", + "description": "Wordwrap a string with ANSI escape codes", + "license": "MIT", + "repository": "chalk/wrap-ansi", + "funding": "https://github.com/chalk/wrap-ansi?sponsor=1", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "engines": { + "node": ">=12" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "wrap", + "break", + "wordwrap", + "wordbreak", + "linewrap", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + 
"strip-ansi": "^7.0.1" + }, + "devDependencies": { + "ava": "^3.15.0", + "chalk": "^4.1.2", + "coveralls": "^3.1.1", + "has-ansi": "^5.0.1", + "nyc": "^15.1.0", + "tsd": "^0.25.0", + "xo": "^0.44.0" + } +} diff --git a/node_modules/@isaacs/cliui/package.json b/node_modules/@isaacs/cliui/package.json new file mode 100644 index 000000000..7a952532d --- /dev/null +++ b/node_modules/@isaacs/cliui/package.json @@ -0,0 +1,86 @@ +{ + "name": "@isaacs/cliui", + "version": "8.0.2", + "description": "easily create complex multi-column command-line-interfaces", + "main": "build/index.cjs", + "exports": { + ".": [ + { + "import": "./index.mjs", + "require": "./build/index.cjs" + }, + "./build/index.cjs" + ] + }, + "type": "module", + "module": "./index.mjs", + "scripts": { + "check": "standardx '**/*.ts' && standardx '**/*.js' && standardx '**/*.cjs'", + "fix": "standardx --fix '**/*.ts' && standardx --fix '**/*.js' && standardx --fix '**/*.cjs'", + "pretest": "rimraf build && tsc -p tsconfig.test.json && cross-env NODE_ENV=test npm run build:cjs", + "test": "c8 mocha ./test/*.cjs", + "test:esm": "c8 mocha ./test/**/*.mjs", + "postest": "check", + "coverage": "c8 report --check-coverage", + "precompile": "rimraf build", + "compile": "tsc", + "postcompile": "npm run build:cjs", + "build:cjs": "rollup -c", + "prepare": "npm run compile" + }, + "repository": "yargs/cliui", + "standard": { + "ignore": [ + "**/example/**" + ], + "globals": [ + "it" + ] + }, + "keywords": [ + "cli", + "command-line", + "layout", + "design", + "console", + "wrap", + "table" + ], + "author": "Ben Coe ", + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "devDependencies": { + "@types/node": "^14.0.27", + "@typescript-eslint/eslint-plugin": "^4.0.0", + "@typescript-eslint/parser": "^4.0.0", + 
"c8": "^7.3.0", + "chai": "^4.2.0", + "chalk": "^4.1.0", + "cross-env": "^7.0.2", + "eslint": "^7.6.0", + "eslint-plugin-import": "^2.22.0", + "eslint-plugin-node": "^11.1.0", + "gts": "^3.0.0", + "mocha": "^10.0.0", + "rimraf": "^3.0.2", + "rollup": "^2.23.1", + "rollup-plugin-ts": "^3.0.2", + "standardx": "^7.0.0", + "typescript": "^4.0.0" + }, + "files": [ + "build", + "index.mjs", + "!*.d.ts" + ], + "engines": { + "node": ">=12" + } +} diff --git a/node_modules/@lhci/cli/package.json b/node_modules/@lhci/cli/package.json index 3ad515e3d..cd19f1878 100644 --- a/node_modules/@lhci/cli/package.json +++ b/node_modules/@lhci/cli/package.json @@ -1,6 +1,6 @@ { "name": "@lhci/cli", - "version": "0.13.0", + "version": "0.14.0", "license": "Apache-2.0", "repository": { "type": "git", @@ -13,21 +13,21 @@ "lhci": "./src/cli.js" }, "dependencies": { - "@lhci/utils": "0.13.0", + "@lhci/utils": "0.14.0", "chrome-launcher": "^0.13.4", "compression": "^1.7.4", "debug": "^4.3.1", "express": "^4.17.1", - "https-proxy-agent": "^5.0.0", "inquirer": "^6.3.1", "isomorphic-fetch": "^3.0.0", - "lighthouse": "11.4.0", + "lighthouse": "12.1.0", "lighthouse-logger": "1.2.0", "open": "^7.1.0", + "proxy-agent": "^6.4.0", "tmp": "^0.1.0", "uuid": "^8.3.1", "yargs": "^15.4.1", "yargs-parser": "^13.1.2" }, - "gitHead": "7cc4ba8369805dae04e396186ad552ccb10bd702" + "gitHead": "36e629e9c03a2b328f5996c16f256431c5fef1fe" } diff --git a/node_modules/@lhci/cli/src/assert/assert.js b/node_modules/@lhci/cli/src/assert/assert.js index e9bedabd6..5cabd9c85 100644 --- a/node_modules/@lhci/cli/src/assert/assert.js +++ b/node_modules/@lhci/cli/src/assert/assert.js @@ -32,6 +32,10 @@ function buildCommand(yargs) { type: 'boolean', description: 'Whether to include the results of passed assertions in the output.', }, + lhr: { + description: + 'Path to LHRs (either a folder or a single file path). Not recursive. 
If not provided, .lighthouseci is used', + }, }); } @@ -53,7 +57,7 @@ async function runCommand(options) { // If we have a budgets file, convert it to our assertions format. if (budgetsFile) options = await convertBudgetsToAssertions(readBudgets(budgetsFile)); - const lhrs = loadSavedLHRs().map(json => JSON.parse(json)); + const lhrs = loadSavedLHRs(options.lhr).map(json => JSON.parse(json)); const uniqueUrls = new Set(lhrs.map(lhr => lhr.finalUrl)); const allResults = getAllAssertionResults(options, lhrs); const groupedResults = _.groupBy(allResults, result => result.url); diff --git a/node_modules/@lhci/cli/src/collect/fallback-server.js b/node_modules/@lhci/cli/src/collect/fallback-server.js index 33c21d7a5..aedfe8565 100644 --- a/node_modules/@lhci/cli/src/collect/fallback-server.js +++ b/node_modules/@lhci/cli/src/collect/fallback-server.js @@ -102,7 +102,9 @@ class FallbackServer { .filter(fileOrDir => fileOrDir.isDirectory()) .map(dir => dir.name); - const htmlFiles = files.filter(file => file.endsWith('.html')).map(file => ({file, depth: 0})); + const htmlFiles = files + .filter(file => file.endsWith('.html') || file.endsWith('.htm')) + .map(file => ({file, depth: 0})); if (depth === 0) return htmlFiles; diff --git a/node_modules/@lhci/cli/src/fetch.js b/node_modules/@lhci/cli/src/fetch.js index 44824cd68..c4e4fc3f5 100644 --- a/node_modules/@lhci/cli/src/fetch.js +++ b/node_modules/@lhci/cli/src/fetch.js @@ -6,17 +6,20 @@ 'use strict'; const fetch = require('isomorphic-fetch'); -const {HttpsProxyAgent} = require('https-proxy-agent'); +const {ProxyAgent} = require('proxy-agent'); /** @type import('isomorphic-fetch') */ module.exports = (url, options) => { - /** @type {Parameters[1] & { agent?: import('https-proxy-agent').HttpsProxyAgent }} */ + /** @type {Parameters[1] & { agent?: import('proxy-agent').ProxyAgent }} */ const instanceOptions = { ...options, }; - if (!instanceOptions.agent && process.env.HTTP_PROXY) { - instanceOptions.agent = new 
HttpsProxyAgent(process.env.HTTP_PROXY); + if ( + !instanceOptions.agent && + (process.env.HTTP_PROXY || process.env.HTTPS_PROXY || process.env.NO_PROXY) + ) { + instanceOptions.agent = new ProxyAgent(); } return fetch(url, instanceOptions); diff --git a/node_modules/@lhci/utils/package.json b/node_modules/@lhci/utils/package.json index 5bb6c47bc..732c9ddf6 100644 --- a/node_modules/@lhci/utils/package.json +++ b/node_modules/@lhci/utils/package.json @@ -1,6 +1,6 @@ { "name": "@lhci/utils", - "version": "0.13.0", + "version": "0.14.0", "license": "Apache-2.0", "repository": { "type": "git", @@ -13,8 +13,8 @@ "debug": "^4.3.1", "isomorphic-fetch": "^3.0.0", "js-yaml": "^3.13.1", - "lighthouse": "11.4.0", + "lighthouse": "12.1.0", "tree-kill": "^1.2.1" }, - "gitHead": "7cc4ba8369805dae04e396186ad552ccb10bd702" + "gitHead": "36e629e9c03a2b328f5996c16f256431c5fef1fe" } diff --git a/node_modules/lighthouse/core/config/budget.js b/node_modules/@lhci/utils/src/budget.js similarity index 84% rename from node_modules/lighthouse/core/config/budget.js rename to node_modules/@lhci/utils/src/budget.js index 58730062f..00a930a8f 100644 --- a/node_modules/lighthouse/core/config/budget.js +++ b/node_modules/@lhci/utils/src/budget.js @@ -1,3 +1,6 @@ +// @ts-nocheck - grabbed from Lighthouse repo. +'use strict'; + /** * @license * Copyright 2019 Google LLC @@ -83,8 +86,10 @@ class Budget { ]; // Assume resourceType is an allowed string, throw if not. if (!validResourceTypes.includes(/** @type {LH.Budget.ResourceType} */ (resourceType))) { - throw new Error(`Invalid resource type: ${resourceType}. \n` + - `Valid resource types are: ${validResourceTypes.join(', ') }`); + throw new Error( + `Invalid resource type: ${resourceType}. 
\n` + + `Valid resource types are: ${validResourceTypes.join(', ')}` + ); } if (!isNumber(budget)) { throw new Error(`Invalid budget: ${budget}`); @@ -100,13 +105,14 @@ class Budget { * @param {string} error */ static throwInvalidPathError(path, error) { - throw new Error(`Invalid path ${path}. ${error}\n` + - `'Path' should be specified using the 'robots.txt' format.\n` + - `Learn more about the 'robots.txt' format here:\n` + - `https://developers.google.com/search/reference/robots_txt#url-matching-based-on-path-values`); + throw new Error( + `Invalid path ${path}. ${error}\n` + + `'Path' should be specified using the 'robots.txt' format.\n` + + `Learn more about the 'robots.txt' format here:\n` + + `https://developers.google.com/search/reference/robots_txt#url-matching-based-on-path-values` + ); } - /** * Validates that path is either: a) undefined or ) properly formed. * Verifies the quantity and location of the two robot.txt regex characters: $, * @@ -176,29 +182,29 @@ class Budget { */ if (!hasWildcard && !hasDollarSign) { return urlPath.startsWith(pattern); - /** - * Case #2: $ only - * Example: "/js$" - * Behavior: URL should be identical to pattern. - */ + /** + * Case #2: $ only + * Example: "/js$" + * Behavior: URL should be identical to pattern. + */ } else if (!hasWildcard && hasDollarSign) { return urlPath === pattern.slice(0, -1); - /** - * Case #3: * only - * Example: "/vendor*chunk" - * Behavior: URL should start with the string pattern that comes before the wildcard - * & later in the string contain the string pattern that comes after the wildcard. - */ + /** + * Case #3: * only + * Example: "/vendor*chunk" + * Behavior: URL should start with the string pattern that comes before the wildcard + * & later in the string contain the string pattern that comes after the wildcard. 
+ */ } else if (hasWildcard && !hasDollarSign) { const [beforeWildcard, afterWildcard] = pattern.split('*'); const remainingUrl = urlPath.slice(beforeWildcard.length); return urlPath.startsWith(beforeWildcard) && remainingUrl.includes(afterWildcard); - /** - * Case #4: $ and * - * Example: "/vendor*chunk.js$" - * Behavior: URL should start with the string pattern that comes before the wildcard - * & later in the string end with the string pattern that comes after the wildcard. - */ + /** + * Case #4: $ and * + * Example: "/vendor*chunk.js$" + * Behavior: URL should start with the string pattern that comes before the wildcard + * & later in the string end with the string pattern that comes after the wildcard. + */ } else if (hasWildcard && hasDollarSign) { const [beforeWildcard, afterWildcard] = pattern.split('*'); const urlEnd = urlPath.slice(beforeWildcard.length); @@ -228,8 +234,10 @@ class Budget { ]; // Assume metric is an allowed string, throw if not. if (!validTimingMetrics.includes(/** @type {LH.Budget.TimingMetric} */ (metric))) { - throw new Error(`Invalid timing metric: ${metric}. \n` + - `Valid timing metrics are: ${validTimingMetrics.join(', ')}`); + throw new Error( + `Invalid timing metric: ${metric}. 
\n` + + `Valid timing metrics are: ${validTimingMetrics.join(', ')}` + ); } if (!isNumber(budget)) { throw new Error(`Invalid budget: ${budget}`); @@ -308,24 +316,30 @@ class Budget { if (isArrayOfUnknownObjects(resourceSizes)) { budget.resourceSizes = resourceSizes.map(Budget.validateResourceBudget); - Budget.assertNoDuplicateStrings(budget.resourceSizes.map(r => r.resourceType), - `budgets[${index}].resourceSizes`); + Budget.assertNoDuplicateStrings( + budget.resourceSizes.map(r => r.resourceType), + `budgets[${index}].resourceSizes` + ); } else if (resourceSizes !== undefined) { throw new Error(`Invalid resourceSizes entry in budget at index ${index}`); } if (isArrayOfUnknownObjects(resourceCounts)) { budget.resourceCounts = resourceCounts.map(Budget.validateResourceBudget); - Budget.assertNoDuplicateStrings(budget.resourceCounts.map(r => r.resourceType), - `budgets[${index}].resourceCounts`); + Budget.assertNoDuplicateStrings( + budget.resourceCounts.map(r => r.resourceType), + `budgets[${index}].resourceCounts` + ); } else if (resourceCounts !== undefined) { throw new Error(`Invalid resourceCounts entry in budget at index ${index}`); } if (isArrayOfUnknownObjects(timings)) { budget.timings = timings.map(Budget.validateTimingBudget); - Budget.assertNoDuplicateStrings(budget.timings.map(r => r.metric), - `budgets[${index}].timings`); + Budget.assertNoDuplicateStrings( + budget.timings.map(r => r.metric), + `budgets[${index}].timings` + ); } else if (timings !== undefined) { throw new Error(`Invalid timings entry in budget at index ${index}`); } @@ -337,4 +351,4 @@ class Budget { } } -export {Budget}; +module.exports = {Budget}; diff --git a/node_modules/@lhci/utils/src/budgets-converter.js b/node_modules/@lhci/utils/src/budgets-converter.js index 1a57a516f..4b667ef76 100644 --- a/node_modules/@lhci/utils/src/budgets-converter.js +++ b/node_modules/@lhci/utils/src/budgets-converter.js @@ -5,6 +5,8 @@ */ 'use strict'; +const {Budget} = require('./budget.js'); + 
/** * @param {string|undefined} path * @return {RegExp} @@ -18,12 +20,12 @@ function convertPathExpressionToRegExp(path) { return new RegExp(`https?://[^/]+${escapedPath}`); } +// TODO: make this not async, and propagate that change to callers as possible. /** * @param {Array} budgets * @return {Promise} */ async function convertBudgetsToAssertions(budgets) { - const {Budget} = await import('lighthouse/core/config/budget.js'); // Normalize the definition using built-in Lighthouse validation. budgets = Budget.initializeBudget(budgets); diff --git a/node_modules/@lhci/utils/src/presets/all.js b/node_modules/@lhci/utils/src/presets/all.js index 4ccc8ab60..558f0e94c 100644 --- a/node_modules/@lhci/utils/src/presets/all.js +++ b/node_modules/@lhci/utils/src/presets/all.js @@ -17,7 +17,7 @@ module.exports = { 'final-screenshot': ['off', {}], 'js-libraries': ['off', {}], 'largest-contentful-paint-element': ['off', {}], - 'layout-shift-elements': ['off', {}], + 'layout-shifts': ['off', {}], 'main-thread-tasks': ['off', {}], 'network-requests': ['off', {}], 'network-rtt': ['off', {}], @@ -34,12 +34,15 @@ module.exports = { 'aria-allowed-attr': ['error', {}], 'aria-allowed-role': ['error', {}], 'aria-command-name': ['error', {}], + 'aria-conditional-attr': ['error', {}], + 'aria-deprecated-role': ['error', {}], 'aria-dialog-name': ['error', {}], 'aria-hidden-body': ['error', {}], 'aria-hidden-focus': ['error', {}], 'aria-input-field-name': ['error', {}], 'aria-meter-name': ['error', {}], 'aria-progressbar-name': ['error', {}], + 'aria-prohibited-attr': ['error', {}], 'aria-required-attr': ['error', {}], 'aria-required-children': ['error', {}], 'aria-required-parent': ['error', {}], @@ -54,14 +57,12 @@ module.exports = { 'bootup-time': ['error', {}], 'button-name': ['error', {}], 'color-contrast': ['error', {}], - 'content-width': ['error', {}], 'crawlable-anchors': ['error', {}], 'csp-xss': ['error', {}], 'cumulative-layout-shift': ['error', {}], 'definition-list': ['error', 
{}], 'document-title': ['error', {}], 'dom-size': ['error', {}], - 'duplicate-id-active': ['error', {}], 'duplicate-id-aria': ['error', {}], 'duplicated-javascript': ['error', {}], 'efficient-animated-content': ['error', {}], @@ -87,7 +88,6 @@ module.exports = { 'input-button-name': ['error', {}], 'input-image-alt': ['error', {}], 'inspector-issues': ['error', {}], - 'installable-manifest': ['error', {}], 'is-crawlable': ['error', {}], 'is-on-https': ['error', {}], 'label-content-name-mismatch': ['error', {}], @@ -100,40 +100,33 @@ module.exports = { 'link-text': ['error', {}], 'long-tasks': ['error', {}], 'mainthread-work-breakdown': ['error', {}], - 'maskable-icon': ['error', {}], 'max-potential-fid': ['error', {}], 'meta-description': ['error', {}], 'meta-refresh': ['error', {}], 'meta-viewport': ['error', {}], 'modern-image-formats': ['error', {}], 'no-document-write': ['error', {}], - 'no-unload-listeners': ['error', {}], 'non-composited-animations': ['error', {}], 'notification-on-start': ['error', {}], 'object-alt': ['error', {}], 'offscreen-images': ['error', {}], 'paste-preventing-inputs': ['error', {}], - 'performance-budget': ['error', {}], - 'preload-fonts': ['error', {}], 'prioritize-lcp-image': ['error', {}], + 'redirects-http': ['error', {}], 'render-blocking-resources': ['error', {}], 'robots-txt': ['error', {}], 'select-name': ['error', {}], 'server-response-time': ['error', {}], 'skip-link': ['error', {}], 'speed-index': ['error', {}], - 'splash-screen': ['error', {}], 'table-duplicate-name': ['error', {}], 'table-fake-caption': ['error', {}], - 'tap-targets': ['error', {}], 'target-size': ['error', {}], 'td-has-header': ['error', {}], 'td-headers-attr': ['error', {}], 'th-has-data-cells': ['error', {}], - 'themed-omnibox': ['error', {}], 'third-party-cookies': ['error', {}], 'third-party-facades': ['error', {}], - 'timing-budget': ['error', {}], 'total-byte-weight': ['error', {}], 'unminified-css': ['error', {}], 'unminified-javascript': 
['error', {}], @@ -145,7 +138,6 @@ module.exports = { 'uses-optimized-images': ['error', {}], 'uses-passive-event-listeners': ['error', {}], 'uses-rel-preconnect': ['error', {}], - 'uses-rel-preload': ['error', {}], 'uses-responsive-images': ['error', {}], 'uses-text-compression': ['error', {}], 'valid-lang': ['error', {}], @@ -163,7 +155,6 @@ module.exports = { label: ['error', {}], list: ['error', {}], listitem: ['error', {}], - plugins: ['error', {}], redirects: ['error', {}], tabindex: ['error', {}], viewport: ['error', {}], diff --git a/node_modules/@lhci/utils/src/presets/no-pwa.js b/node_modules/@lhci/utils/src/presets/no-pwa.js index 09535e72b..8bf57a5e0 100644 --- a/node_modules/@lhci/utils/src/presets/no-pwa.js +++ b/node_modules/@lhci/utils/src/presets/no-pwa.js @@ -7,16 +7,13 @@ const recommended = require('./recommended.js'); +// TODO: PWA doesn't exist anymore, so remove? + module.exports = { assertions: { ...recommended.assertions, // Every PWA audit is disabled 'is-on-https': 'off', - 'installable-manifest': 'off', - 'splash-screen': 'off', - 'themed-omnibox': 'off', - 'content-width': 'off', viewport: 'off', - 'maskable-icon': 'off', }, }; diff --git a/node_modules/@lhci/utils/src/presets/recommended.js b/node_modules/@lhci/utils/src/presets/recommended.js index aca0d9464..a39bebe3b 100644 --- a/node_modules/@lhci/utils/src/presets/recommended.js +++ b/node_modules/@lhci/utils/src/presets/recommended.js @@ -19,7 +19,6 @@ module.exports = { 'mainthread-work-breakdown': ['warn', {}], 'max-potential-fid': ['warn', {}], 'speed-index': ['warn', {}], - 'uses-rel-preload': ['warn', {}], interactive: ['warn', {}], // Flaky score but non-flaky details (error, maxLength) 'duplicated-javascript': ['warn', {maxLength: 0}], // warn until https://github.com/GoogleChrome/lighthouse/issues/11285 is fixed diff --git a/node_modules/@lhci/utils/src/psi-client.js b/node_modules/@lhci/utils/src/psi-client.js index 5e29f56c2..21dd5ae7b 100644 --- 
a/node_modules/@lhci/utils/src/psi-client.js +++ b/node_modules/@lhci/utils/src/psi-client.js @@ -23,14 +23,14 @@ class PsiClient { /** * @param {string} urlToTest - * @param {{strategy?: 'mobile'|'desktop', locale?: string, categories?: Array<'performance' | 'accessibility' | 'best-practices' | 'pwa' | 'seo'>}} [options] + * @param {{strategy?: 'mobile'|'desktop', locale?: string, categories?: Array<'performance' | 'accessibility' | 'best-practices' | 'seo'>}} [options] * @return {Promise} */ async run(urlToTest, options = {}) { const { strategy = 'mobile', locale = 'en_US', - categories = ['performance', 'accessibility', 'best-practices', 'pwa', 'seo'], + categories = ['performance', 'accessibility', 'best-practices', 'seo'], } = options; const url = new this._URL(this._endpointURL); url.searchParams.set('url', urlToTest); diff --git a/node_modules/@lhci/utils/src/psi-runner.js b/node_modules/@lhci/utils/src/psi-runner.js index 58efffb70..a1d4a70f4 100644 --- a/node_modules/@lhci/utils/src/psi-runner.js +++ b/node_modules/@lhci/utils/src/psi-runner.js @@ -19,7 +19,7 @@ class PsiRunner { /** * @param {string} url - * @param {{psiApiKey?: string, psiApiEndpoint?: string, psiStrategy?: 'mobile'|'desktop', psiCategories?: Array<'performance' | 'accessibility' | 'best-practices' | 'pwa' | 'seo'>}} [options] + * @param {{psiApiKey?: string, psiApiEndpoint?: string, psiStrategy?: 'mobile'|'desktop', psiCategories?: Array<'performance' | 'accessibility' | 'best-practices' | 'seo'>}} [options] * @return {Promise} */ async run(url, options) { @@ -34,7 +34,7 @@ class PsiRunner { /** * @param {string} url - * @param {{psiApiKey?: string, psiApiEndpoint?: string, psiStrategy?: 'mobile'|'desktop', psiCategories?: Array<'performance' | 'accessibility' | 'best-practices' | 'pwa' | 'seo'>}} [options] + * @param {{psiApiKey?: string, psiApiEndpoint?: string, psiStrategy?: 'mobile'|'desktop', psiCategories?: Array<'performance' | 'accessibility' | 'best-practices' | 'seo'>}} 
[options] * @return {Promise} */ async runUntilSuccess(url, options = {}) { diff --git a/node_modules/@lhci/utils/src/saved-reports.js b/node_modules/@lhci/utils/src/saved-reports.js index 62c80a922..739f2da2a 100644 --- a/node_modules/@lhci/utils/src/saved-reports.js +++ b/node_modules/@lhci/utils/src/saved-reports.js @@ -18,14 +18,23 @@ function ensureDirectoryExists(baseDir = LHCI_DIR) { } /** + * @param {string} [directoryOrPath] * @return {string[]} */ -function loadSavedLHRs() { - ensureDirectoryExists(); +function loadSavedLHRs(directoryOrPath) { + directoryOrPath = directoryOrPath || LHCI_DIR; + + if (directoryOrPath === LHCI_DIR) { + ensureDirectoryExists(); + } + + if (fs.lstatSync(directoryOrPath).isFile()) { + return [fs.readFileSync(directoryOrPath, 'utf8')]; + } /** @type {string[]} */ const lhrs = []; - for (const file of fs.readdirSync(LHCI_DIR)) { + for (const file of fs.readdirSync(directoryOrPath)) { if (!LHR_REGEX.test(file)) continue; const filePath = path.join(LHCI_DIR, file); diff --git a/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types/package.json b/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types/package.json index ade207b2a..350a5825d 100644 --- a/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types/package.json +++ b/node_modules/@octokit/request-error/node_modules/@octokit/openapi-types/package.json @@ -7,14 +7,15 @@ "directory": "packages/openapi-types" }, "publishConfig": { - "access": "public" + "access": "public", + "provenance": true }, - "version": "20.0.0", + "version": "22.2.0", "main": "", "types": "types.d.ts", "author": "Gregor Martynus (https://twitter.com/gr2m)", "license": "MIT", "octokit": { - "openapi-version": "14.0.0" + "openapi-version": "16.5.0" } } diff --git a/node_modules/@octokit/request-error/node_modules/@octokit/types/package.json b/node_modules/@octokit/request-error/node_modules/@octokit/types/package.json index 99959575e..7ee12d257 100644 --- 
a/node_modules/@octokit/request-error/node_modules/@octokit/types/package.json +++ b/node_modules/@octokit/request-error/node_modules/@octokit/types/package.json @@ -1,12 +1,13 @@ { "name": "@octokit/types", - "version": "12.6.0", + "version": "13.5.0", "publishConfig": { - "access": "public" + "access": "public", + "provenance": true }, "description": "Shared TypeScript definitions for Octokit projects", "dependencies": { - "@octokit/openapi-types": "^20.0.0" + "@octokit/openapi-types": "^22.2.0" }, "repository": "github:octokit/types.ts", "keywords": [ @@ -23,10 +24,10 @@ "@types/node": ">= 8", "github-openapi-graphql-query": "^4.0.0", "handlebars": "^4.7.6", - "json-schema-to-typescript": "^13.0.0", + "json-schema-to-typescript": "^14.0.0", "lodash.set": "^4.3.2", "npm-run-all2": "^6.0.0", - "pascal-case": "^3.1.1", + "pascal-case": "^4.0.0", "prettier": "^3.0.0", "semantic-release": "^23.0.0", "semantic-release-plugin-update-version-in-files": "^1.0.0", @@ -36,7 +37,7 @@ "typescript": "^5.0.0" }, "octokit": { - "openapi-version": "14.0.0" + "openapi-version": "16.5.0" }, "files": [ "dist-types/**" diff --git a/node_modules/@octokit/request-error/package.json b/node_modules/@octokit/request-error/package.json index c5c6af8c3..42dd8ed5e 100644 --- a/node_modules/@octokit/request-error/package.json +++ b/node_modules/@octokit/request-error/package.json @@ -1,8 +1,9 @@ { "name": "@octokit/request-error", - "version": "5.0.1", + "version": "5.1.0", "publishConfig": { - "access": "public" + "access": "public", + "provenance": true }, "description": "Error class for Octokit request errors", "repository": "github:octokit/request-error.js", @@ -15,7 +16,7 @@ "author": "Gregor Martynus (https://github.com/gr2m)", "license": "MIT", "dependencies": { - "@octokit/types": "^12.0.0", + "@octokit/types": "^13.1.0", "deprecation": "^2.0.0", "once": "^1.4.0" }, diff --git a/node_modules/@opentelemetry/api/build/esm/api/context.js 
b/node_modules/@opentelemetry/api/build/esm/api/context.js deleted file mode 100644 index 0d02f9723..000000000 --- a/node_modules/@opentelemetry/api/build/esm/api/context.js +++ /dev/null @@ -1,110 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __read = (this && this.__read) || function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -}; -var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -}; -import { NoopContextManager } from '../context/NoopContextManager'; -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { DiagAPI } from './diag'; -var API_NAME = 'context'; -var NOOP_CONTEXT_MANAGER = new NoopContextManager(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Context API - */ -var ContextAPI = 
/** @class */ (function () { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - function ContextAPI() { - } - /** Get the singleton instance of the Context API */ - ContextAPI.getInstance = function () { - if (!this._instance) { - this._instance = new ContextAPI(); - } - return this._instance; - }; - /** - * Set the current context manager. - * - * @returns true if the context manager was successfully registered, else false - */ - ContextAPI.prototype.setGlobalContextManager = function (contextManager) { - return registerGlobal(API_NAME, contextManager, DiagAPI.instance()); - }; - /** - * Get the currently active context - */ - ContextAPI.prototype.active = function () { - return this._getContextManager().active(); - }; - /** - * Execute a function with an active context - * - * @param context context to be active during function execution - * @param fn function to execute in a context - * @param thisArg optional receiver to be used for calling fn - * @param args optional arguments forwarded to fn - */ - ContextAPI.prototype.with = function (context, fn, thisArg) { - var _a; - var args = []; - for (var _i = 3; _i < arguments.length; _i++) { - args[_i - 3] = arguments[_i]; - } - return (_a = this._getContextManager()).with.apply(_a, __spreadArray([context, fn, thisArg], __read(args), false)); - }; - /** - * Bind a context to a target function or event emitter - * - * @param context context to bind to the event emitter or function. 
Defaults to the currently active context - * @param target function or event emitter to bind - */ - ContextAPI.prototype.bind = function (context, target) { - return this._getContextManager().bind(context, target); - }; - ContextAPI.prototype._getContextManager = function () { - return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; - }; - /** Disable and remove the global context manager */ - ContextAPI.prototype.disable = function () { - this._getContextManager().disable(); - unregisterGlobal(API_NAME, DiagAPI.instance()); - }; - return ContextAPI; -}()); -export { ContextAPI }; -//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/diag.js b/node_modules/@opentelemetry/api/build/esm/api/diag.js deleted file mode 100644 index 25eb9afc1..000000000 --- a/node_modules/@opentelemetry/api/build/esm/api/diag.js +++ /dev/null @@ -1,121 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -var __read = (this && this.__read) || function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -}; -var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -}; -import { DiagComponentLogger } from '../diag/ComponentLogger'; -import { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger'; -import { DiagLogLevel, } from '../diag/types'; -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -var API_NAME = 'diag'; -/** - * Singleton object which represents the entry point to the OpenTelemetry internal - * diagnostic API - */ -var DiagAPI = /** @class */ (function () { - /** - * Private internal constructor - * @private - */ - function DiagAPI() { - function _logProxy(funcName) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - var logger = getGlobal('diag'); - // shortcut if logger not set - if (!logger) - return; - return logger[funcName].apply(logger, __spreadArray([], __read(args), false)); - }; - } - // Using self local variable for minification purposes as 'this' cannot be minified - var self = this; - // DiagAPI specific functions - var setLogger = function (logger, optionsOrLogLevel) { - var _a, _b, _c; - if (optionsOrLogLevel === void 0) { optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }; } - if (logger === self) { - 
// There isn't much we can do here. - // Logging to the console might break the user application. - // Try to log to self. If a logger was previously registered it will receive the log. - var err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); - self.error((_a = err.stack) !== null && _a !== void 0 ? _a : err.message); - return false; - } - if (typeof optionsOrLogLevel === 'number') { - optionsOrLogLevel = { - logLevel: optionsOrLogLevel, - }; - } - var oldLogger = getGlobal('diag'); - var newLogger = createLogLevelDiagLogger((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : DiagLogLevel.INFO, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { - var stack = (_c = new Error().stack) !== null && _c !== void 0 ? _c : ''; - oldLogger.warn("Current logger will be overwritten from " + stack); - newLogger.warn("Current logger will overwrite one already registered from " + stack); - } - return registerGlobal('diag', newLogger, self, true); - }; - self.setLogger = setLogger; - self.disable = function () { - unregisterGlobal(API_NAME, self); - }; - self.createComponentLogger = function (options) { - return new DiagComponentLogger(options); - }; - self.verbose = _logProxy('verbose'); - self.debug = _logProxy('debug'); - self.info = _logProxy('info'); - self.warn = _logProxy('warn'); - self.error = _logProxy('error'); - } - /** Get the singleton instance of the DiagAPI API */ - DiagAPI.instance = function () { - if (!this._instance) { - this._instance = new DiagAPI(); - } - return this._instance; - }; - return DiagAPI; -}()); -export { DiagAPI }; -//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/metrics.js b/node_modules/@opentelemetry/api/build/esm/api/metrics.js deleted file mode 
100644 index 92c575a4c..000000000 --- a/node_modules/@opentelemetry/api/build/esm/api/metrics.js +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider'; -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { DiagAPI } from './diag'; -var API_NAME = 'metrics'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Metrics API - */ -var MetricsAPI = /** @class */ (function () { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - function MetricsAPI() { - } - /** Get the singleton instance of the Metrics API */ - MetricsAPI.getInstance = function () { - if (!this._instance) { - this._instance = new MetricsAPI(); - } - return this._instance; - }; - /** - * Set the current global meter provider. - * Returns true if the meter provider was successfully registered, else false. - */ - MetricsAPI.prototype.setGlobalMeterProvider = function (provider) { - return registerGlobal(API_NAME, provider, DiagAPI.instance()); - }; - /** - * Returns the global meter provider. - */ - MetricsAPI.prototype.getMeterProvider = function () { - return getGlobal(API_NAME) || NOOP_METER_PROVIDER; - }; - /** - * Returns a meter from the global meter provider. 
- */ - MetricsAPI.prototype.getMeter = function (name, version, options) { - return this.getMeterProvider().getMeter(name, version, options); - }; - /** Remove the global meter provider */ - MetricsAPI.prototype.disable = function () { - unregisterGlobal(API_NAME, DiagAPI.instance()); - }; - return MetricsAPI; -}()); -export { MetricsAPI }; -//# sourceMappingURL=metrics.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/propagation.js b/node_modules/@opentelemetry/api/build/esm/api/propagation.js deleted file mode 100644 index d3f6f8322..000000000 --- a/node_modules/@opentelemetry/api/build/esm/api/propagation.js +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator'; -import { defaultTextMapGetter, defaultTextMapSetter, } from '../propagation/TextMapPropagator'; -import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage, } from '../baggage/context-helpers'; -import { createBaggage } from '../baggage/utils'; -import { DiagAPI } from './diag'; -var API_NAME = 'propagation'; -var NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Propagation API - */ -var PropagationAPI = /** @class */ (function () { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - function PropagationAPI() { - this.createBaggage = createBaggage; - this.getBaggage = getBaggage; - this.getActiveBaggage = getActiveBaggage; - this.setBaggage = setBaggage; - this.deleteBaggage = deleteBaggage; - } - /** Get the singleton instance of the Propagator API */ - PropagationAPI.getInstance = function () { - if (!this._instance) { - this._instance = new PropagationAPI(); - } - return this._instance; - }; - /** - * Set the current propagator. 
- * - * @returns true if the propagator was successfully registered, else false - */ - PropagationAPI.prototype.setGlobalPropagator = function (propagator) { - return registerGlobal(API_NAME, propagator, DiagAPI.instance()); - }; - /** - * Inject context into a carrier to be propagated inter-process - * - * @param context Context carrying tracing data to inject - * @param carrier carrier to inject context into - * @param setter Function used to set values on the carrier - */ - PropagationAPI.prototype.inject = function (context, carrier, setter) { - if (setter === void 0) { setter = defaultTextMapSetter; } - return this._getGlobalPropagator().inject(context, carrier, setter); - }; - /** - * Extract context from a carrier - * - * @param context Context which the newly created context will inherit from - * @param carrier Carrier to extract context from - * @param getter Function used to extract keys from a carrier - */ - PropagationAPI.prototype.extract = function (context, carrier, getter) { - if (getter === void 0) { getter = defaultTextMapGetter; } - return this._getGlobalPropagator().extract(context, carrier, getter); - }; - /** - * Return a list of all fields which may be used by the propagator. 
- */ - PropagationAPI.prototype.fields = function () { - return this._getGlobalPropagator().fields(); - }; - /** Remove the global propagator */ - PropagationAPI.prototype.disable = function () { - unregisterGlobal(API_NAME, DiagAPI.instance()); - }; - PropagationAPI.prototype._getGlobalPropagator = function () { - return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; - }; - return PropagationAPI; -}()); -export { PropagationAPI }; -//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/api/trace.js b/node_modules/@opentelemetry/api/build/esm/api/trace.js deleted file mode 100644 index a4aa6e6ac..000000000 --- a/node_modules/@opentelemetry/api/build/esm/api/trace.js +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { ProxyTracerProvider } from '../trace/ProxyTracerProvider'; -import { isSpanContextValid, wrapSpanContext, } from '../trace/spancontext-utils'; -import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext, } from '../trace/context-utils'; -import { DiagAPI } from './diag'; -var API_NAME = 'trace'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Tracing API - */ -var TraceAPI = /** @class */ (function () { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - function TraceAPI() { - this._proxyTracerProvider = new ProxyTracerProvider(); - this.wrapSpanContext = wrapSpanContext; - this.isSpanContextValid = isSpanContextValid; - this.deleteSpan = deleteSpan; - this.getSpan = getSpan; - this.getActiveSpan = getActiveSpan; - this.getSpanContext = getSpanContext; - this.setSpan = setSpan; - this.setSpanContext = setSpanContext; - } - /** Get the singleton instance of the Trace API */ - TraceAPI.getInstance = function () { - if (!this._instance) { - this._instance = new TraceAPI(); - } - return this._instance; - }; - /** - * Set the current global tracer. - * - * @returns true if the tracer provider was successfully registered, else false - */ - TraceAPI.prototype.setGlobalTracerProvider = function (provider) { - var success = registerGlobal(API_NAME, this._proxyTracerProvider, DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; - }; - /** - * Returns the global tracer provider. - */ - TraceAPI.prototype.getTracerProvider = function () { - return getGlobal(API_NAME) || this._proxyTracerProvider; - }; - /** - * Returns a tracer from the global tracer provider. 
- */ - TraceAPI.prototype.getTracer = function (name, version) { - return this.getTracerProvider().getTracer(name, version); - }; - /** Remove the global tracer provider */ - TraceAPI.prototype.disable = function () { - unregisterGlobal(API_NAME, DiagAPI.instance()); - this._proxyTracerProvider = new ProxyTracerProvider(); - }; - return TraceAPI; -}()); -export { TraceAPI }; -//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js deleted file mode 100644 index 691133415..000000000 --- a/node_modules/@opentelemetry/api/build/esm/baggage/context-helpers.js +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { ContextAPI } from '../api/context'; -import { createContextKey } from '../context/context'; -/** - * Baggage key - */ -var BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -export function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -/** - * Retrieve the current baggage from the active/current context - * - * @returns {Baggage} Extracted baggage from the context - */ -export function getActiveBaggage() { - return getBaggage(ContextAPI.getInstance().active()); -} -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -export function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -export function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js deleted file mode 100644 index c29d6852d..000000000 --- a/node_modules/@opentelemetry/api/build/esm/baggage/internal/baggage-impl.js +++ /dev/null @@ -1,98 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __read = (this && this.__read) || function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -}; -var __values = (this && this.__values) || function(o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); -}; -var BaggageImpl = /** @class */ (function () { - function BaggageImpl(entries) { - this._entries = entries ? 
new Map(entries) : new Map(); - } - BaggageImpl.prototype.getEntry = function (key) { - var entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); - }; - BaggageImpl.prototype.getAllEntries = function () { - return Array.from(this._entries.entries()).map(function (_a) { - var _b = __read(_a, 2), k = _b[0], v = _b[1]; - return [k, v]; - }); - }; - BaggageImpl.prototype.setEntry = function (key, entry) { - var newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; - }; - BaggageImpl.prototype.removeEntry = function (key) { - var newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; - }; - BaggageImpl.prototype.removeEntries = function () { - var e_1, _a; - var keys = []; - for (var _i = 0; _i < arguments.length; _i++) { - keys[_i] = arguments[_i]; - } - var newBaggage = new BaggageImpl(this._entries); - try { - for (var keys_1 = __values(keys), keys_1_1 = keys_1.next(); !keys_1_1.done; keys_1_1 = keys_1.next()) { - var key = keys_1_1.value; - newBaggage._entries.delete(key); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (keys_1_1 && !keys_1_1.done && (_a = keys_1.return)) _a.call(keys_1); - } - finally { if (e_1) throw e_1.error; } - } - return newBaggage; - }; - BaggageImpl.prototype.clear = function () { - return new BaggageImpl(); - }; - return BaggageImpl; -}()); -export { BaggageImpl }; -//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js deleted file mode 100644 index 0e7dc36dd..000000000 --- a/node_modules/@opentelemetry/api/build/esm/baggage/internal/symbol.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may 
not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * Symbol used to make BaggageEntryMetadata an opaque type - */ -export var baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); -//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/types.js b/node_modules/@opentelemetry/api/build/esm/baggage/types.js deleted file mode 100644 index 928faad02..000000000 --- a/node_modules/@opentelemetry/api/build/esm/baggage/types.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js b/node_modules/@opentelemetry/api/build/esm/baggage/utils.js deleted file mode 100644 index 3cc27165b..000000000 --- a/node_modules/@opentelemetry/api/build/esm/baggage/utils.js +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { DiagAPI } from '../api/diag'; -import { BaggageImpl } from './internal/baggage-impl'; -import { baggageEntryMetadataSymbol } from './internal/symbol'; -var diag = DiagAPI.instance(); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -export function createBaggage(entries) { - if (entries === void 0) { entries = {}; } - return new BaggageImpl(new Map(Object.entries(entries))); -} -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
- * - */ -export function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - diag.error("Cannot create baggage metadata from unknown type: " + typeof str); - str = ''; - } - return { - __TYPE__: baggageEntryMetadataSymbol, - toString: function () { - return str; - }, - }; -} -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js b/node_modules/@opentelemetry/api/build/esm/common/Attributes.js deleted file mode 100644 index dbb1e4977..000000000 --- a/node_modules/@opentelemetry/api/build/esm/common/Attributes.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Exception.js b/node_modules/@opentelemetry/api/build/esm/common/Exception.js deleted file mode 100644 index 6522a8e65..000000000 --- a/node_modules/@opentelemetry/api/build/esm/common/Exception.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/common/Time.js b/node_modules/@opentelemetry/api/build/esm/common/Time.js deleted file mode 100644 index 2abdf582f..000000000 --- a/node_modules/@opentelemetry/api/build/esm/common/Time.js +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context-api.js b/node_modules/@opentelemetry/api/build/esm/context-api.js deleted file mode 100644 index b89fb257a..000000000 --- a/node_modules/@opentelemetry/api/build/esm/context-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { ContextAPI } from './api/context'; -/** Entrypoint for context API */ -export var context = ContextAPI.getInstance(); -//# sourceMappingURL=context-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js deleted file mode 100644 index 9794eff1e..000000000 --- a/node_modules/@opentelemetry/api/build/esm/context/NoopContextManager.js +++ /dev/null @@ -1,67 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -var __read = (this && this.__read) || function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -}; -var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -}; -import { ROOT_CONTEXT } from './context'; -var NoopContextManager = /** @class */ (function () { - function NoopContextManager() { - } - NoopContextManager.prototype.active = function () { - return ROOT_CONTEXT; - }; - NoopContextManager.prototype.with = function (_context, fn, thisArg) { - var args = []; - for (var _i = 3; _i < arguments.length; _i++) { - args[_i - 3] = arguments[_i]; - } - return fn.call.apply(fn, __spreadArray([thisArg], __read(args), false)); - }; - NoopContextManager.prototype.bind = function (_context, target) { - return target; - }; - NoopContextManager.prototype.enable = function () { - return this; - }; - NoopContextManager.prototype.disable = function () { - return this; - }; - return NoopContextManager; -}()); -export { NoopContextManager }; -//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/context.js b/node_modules/@opentelemetry/api/build/esm/context/context.js deleted file mode 100644 index f8909deeb..000000000 --- a/node_modules/@opentelemetry/api/build/esm/context/context.js +++ /dev/null @@ -1,52 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed 
under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** Get a key to uniquely identify a context value */ -export function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. - // - // Therefore, we use Symbol.for which returns the same key for the same input. - return Symbol.for(description); -} -var BaseContext = /** @class */ (function () { - /** - * Construct a new context which inherits values from an optional parent context. - * - * @param parentContext a context from which to inherit values - */ - function BaseContext(parentContext) { - // for minification - var self = this; - self._currentContext = parentContext ? 
new Map(parentContext) : new Map(); - self.getValue = function (key) { return self._currentContext.get(key); }; - self.setValue = function (key, value) { - var context = new BaseContext(self._currentContext); - context._currentContext.set(key, value); - return context; - }; - self.deleteValue = function (key) { - var context = new BaseContext(self._currentContext); - context._currentContext.delete(key); - return context; - }; - } - return BaseContext; -}()); -/** The root context is used as the default parent context when there is no active context */ -export var ROOT_CONTEXT = new BaseContext(); -//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/context/types.js b/node_modules/@opentelemetry/api/build/esm/context/types.js deleted file mode 100644 index 928faad02..000000000 --- a/node_modules/@opentelemetry/api/build/esm/context/types.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag-api.js b/node_modules/@opentelemetry/api/build/esm/diag-api.js deleted file mode 100644 index 9f85c1be3..000000000 --- a/node_modules/@opentelemetry/api/build/esm/diag-api.js +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -import { DiagAPI } from './api/diag'; -/** - * Entrypoint for Diag API. - * Defines Diagnostic handler used for internal diagnostic logging operations. - * The default provides a Noop DiagLogger implementation which may be changed via the - * diag.setLogger(logger: DiagLogger) function. - */ -export var diag = DiagAPI.instance(); -//# sourceMappingURL=diag-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js deleted file mode 100644 index 44bc8be66..000000000 --- a/node_modules/@opentelemetry/api/build/esm/diag/ComponentLogger.js +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __read = (this && this.__read) || function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; -}; -var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); -}; -import { getGlobal } from '../internal/global-utils'; -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. 
- * It will then forward all message to global diag logger - * @example - * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); - * cLogger.debug('test'); - * // @opentelemetry/instrumentation-http test - */ -var DiagComponentLogger = /** @class */ (function () { - function DiagComponentLogger(props) { - this._namespace = props.namespace || 'DiagComponentLogger'; - } - DiagComponentLogger.prototype.debug = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('debug', this._namespace, args); - }; - DiagComponentLogger.prototype.error = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('error', this._namespace, args); - }; - DiagComponentLogger.prototype.info = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('info', this._namespace, args); - }; - DiagComponentLogger.prototype.warn = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('warn', this._namespace, args); - }; - DiagComponentLogger.prototype.verbose = function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - return logProxy('verbose', this._namespace, args); - }; - return DiagComponentLogger; -}()); -export { DiagComponentLogger }; -function logProxy(funcName, namespace, args) { - var logger = getGlobal('diag'); - // shortcut if logger not set - if (!logger) { - return; - } - args.unshift(namespace); - return logger[funcName].apply(logger, __spreadArray([], __read(args), false)); -} -//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js 
b/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js deleted file mode 100644 index 5965b8aaf..000000000 --- a/node_modules/@opentelemetry/api/build/esm/diag/consoleLogger.js +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var consoleMap = [ - { n: 'error', c: 'error' }, - { n: 'warn', c: 'warn' }, - { n: 'info', c: 'info' }, - { n: 'debug', c: 'debug' }, - { n: 'verbose', c: 'trace' }, -]; -/** - * A simple Immutable Console based diagnostic logger which will output any messages to the Console. 
- * If you want to limit the amount of logging to a specific level or lower use the - * {@link createLogLevelDiagLogger} - */ -var DiagConsoleLogger = /** @class */ (function () { - function DiagConsoleLogger() { - function _consoleFunc(funcName) { - return function () { - var args = []; - for (var _i = 0; _i < arguments.length; _i++) { - args[_i] = arguments[_i]; - } - if (console) { - // Some environments only expose the console when the F12 developer console is open - // eslint-disable-next-line no-console - var theFunc = console[funcName]; - if (typeof theFunc !== 'function') { - // Not all environments support all functions - // eslint-disable-next-line no-console - theFunc = console.log; - } - // One last final check - if (typeof theFunc === 'function') { - return theFunc.apply(console, args); - } - } - }; - } - for (var i = 0; i < consoleMap.length; i++) { - this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); - } - } - return DiagConsoleLogger; -}()); -export { DiagConsoleLogger }; -//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js deleted file mode 100644 index aedab38dc..000000000 --- a/node_modules/@opentelemetry/api/build/esm/diag/internal/logLevelLogger.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { DiagLogLevel } from '../types'; -export function createLogLevelDiagLogger(maxLevel, logger) { - if (maxLevel < DiagLogLevel.NONE) { - maxLevel = DiagLogLevel.NONE; - } - else if (maxLevel > DiagLogLevel.ALL) { - maxLevel = DiagLogLevel.ALL; - } - // In case the logger is null or undefined - logger = logger || {}; - function _filterFunc(funcName, theLevel) { - var theFunc = logger[funcName]; - if (typeof theFunc === 'function' && maxLevel >= theLevel) { - return theFunc.bind(logger); - } - return function () { }; - } - return { - error: _filterFunc('error', DiagLogLevel.ERROR), - warn: _filterFunc('warn', DiagLogLevel.WARN), - info: _filterFunc('info', DiagLogLevel.INFO), - debug: _filterFunc('debug', DiagLogLevel.DEBUG), - verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE), - }; -} -//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js deleted file mode 100644 index 7d5ba63d8..000000000 --- a/node_modules/@opentelemetry/api/build/esm/diag/internal/noopLogger.js +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -function noopLogFunction() { } -/** - * Returns a No-Op Diagnostic logger where all messages do nothing. - * @implements {@link DiagLogger} - * @returns {DiagLogger} - */ -export function createNoopDiagLogger() { - return { - verbose: noopLogFunction, - debug: noopLogFunction, - info: noopLogFunction, - warn: noopLogFunction, - error: noopLogFunction, - }; -} -//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/diag/types.js b/node_modules/@opentelemetry/api/build/esm/diag/types.js deleted file mode 100644 index 306585e83..000000000 --- a/node_modules/@opentelemetry/api/build/esm/diag/types.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * Defines the available internal logging levels for the diagnostic logger, the numeric values - * of the levels are defined to match the original values from the initial LogLevel to avoid - * compatibility/migration issues for any implementation that assume the numeric ordering. 
- */ -export var DiagLogLevel; -(function (DiagLogLevel) { - /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ - DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; - /** Identifies an error scenario */ - DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; - /** Identifies a warning scenario */ - DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; - /** General informational log message */ - DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; - /** General debug log message */ - DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; - /** - * Detailed trace level logging should only be used for development, should only be set - * in a development environment. - */ - DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; - /** Used to set the logging level to include all logging */ - DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; -})(DiagLogLevel || (DiagLogLevel = {})); -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/index.js b/node_modules/@opentelemetry/api/build/esm/experimental/index.js deleted file mode 100644 index 8400e49f5..000000000 --- a/node_modules/@opentelemetry/api/build/esm/experimental/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js deleted file mode 100644 index 0c6a2bd4e..000000000 --- a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredOptions.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=SugaredOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js b/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js deleted file mode 100644 index 6fb98ded2..000000000 --- a/node_modules/@opentelemetry/api/build/esm/experimental/trace/SugaredTracer.js +++ /dev/null @@ -1,92 +0,0 @@ -import { context, SpanStatusCode } from '../../'; -var defaultOnException = function (e, span) { - span.recordException(e); - span.setStatus({ - code: SpanStatusCode.ERROR, - }); -}; -/** - * return a new SugaredTracer created from the supplied one - * @param tracer - */ -export function wrapTracer(tracer) { - return new SugaredTracer(tracer); -} -var SugaredTracer = /** @class */ (function () { - function SugaredTracer(tracer) { - this._tracer = tracer; - this.startSpan = tracer.startSpan.bind(this._tracer); - this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer); - } - SugaredTracer.prototype.withActiveSpan = function (name, arg2, arg3, arg4) { - var _a = massageParams(arg2, arg3, arg4), opts = _a.opts, ctx = _a.ctx, fn = _a.fn; - return this._tracer.startActiveSpan(name, opts, ctx, function (span) { - return handleFn(span, opts, fn); - }); - }; - SugaredTracer.prototype.withSpan = function (name, arg2, arg3, arg4) { - var _a = massageParams(arg2, arg3, arg4), opts = _a.opts, ctx = _a.ctx, fn = _a.fn; - var span = this._tracer.startSpan(name, opts, ctx); - return handleFn(span, opts, fn); - }; - return SugaredTracer; -}()); -export { SugaredTracer }; -/** - * Massages parameters of withSpan and withActiveSpan to allow signature overwrites - * @param arg - * @param arg2 - * @param arg3 - */ -function massageParams(arg, arg2, arg3) { - var opts; - var ctx; - var fn; - if (!arg2 && !arg3) { - fn = arg; - } - else if (!arg3) { - opts = arg; - fn = arg2; - } - else { - opts = arg; - ctx = arg2; - fn = arg3; - } - opts = opts !== null && 
opts !== void 0 ? opts : {}; - ctx = ctx !== null && ctx !== void 0 ? ctx : context.active(); - return { opts: opts, ctx: ctx, fn: fn }; -} -/** - * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling - * @param span - * @param opts - * @param fn - */ -function handleFn(span, opts, fn) { - var _a; - var onException = (_a = opts.onException) !== null && _a !== void 0 ? _a : defaultOnException; - var errorHandler = function (e) { - onException(e, span); - span.end(); - throw e; - }; - try { - var ret = fn(span); - // if fn is an async function, attach a recordException and spanEnd callback to the promise - if (typeof (ret === null || ret === void 0 ? void 0 : ret.then) === 'function') { - return ret.then(function (val) { - span.end(); - return val; - }, errorHandler); - } - span.end(); - return ret; - } - catch (e) { - // add throw to signal the compiler that this will throw in the inner scope - throw errorHandler(e); - } -} -//# sourceMappingURL=SugaredTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/index.js b/node_modules/@opentelemetry/api/build/esm/index.js deleted file mode 100644 index 70cd87012..000000000 --- a/node_modules/@opentelemetry/api/build/esm/index.js +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export { baggageEntryMetadataFromString } from './baggage/utils'; -// Context APIs -export { createContextKey, ROOT_CONTEXT } from './context/context'; -// Diag APIs -export { DiagConsoleLogger } from './diag/consoleLogger'; -export { DiagLogLevel, } from './diag/types'; -// Metrics APIs -export { createNoopMeter } from './metrics/NoopMeter'; -export { ValueType, } from './metrics/Metric'; -// Propagation APIs -export { defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; -export { ProxyTracer } from './trace/ProxyTracer'; -export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; -export { SamplingDecision } from './trace/SamplingResult'; -export { SpanKind } from './trace/span_kind'; -export { SpanStatusCode } from './trace/status'; -export { TraceFlags } from './trace/trace_flags'; -export { createTraceState } from './trace/internal/utils'; -export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; -export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -import { context } from './context-api'; -import { diag } from './diag-api'; -import { metrics } from './metrics-api'; -import { propagation } from './propagation-api'; -import { trace } from './trace-api'; -// Named export. -export { context, diag, metrics, propagation, trace }; -// Default export. 
-export default { - context: context, - diag: diag, - metrics: metrics, - propagation: propagation, - trace: trace, -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js b/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js deleted file mode 100644 index 88e82a194..000000000 --- a/node_modules/@opentelemetry/api/build/esm/internal/global-utils.js +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { _globalThis } from '../platform'; -import { VERSION } from '../version'; -import { isCompatible } from './semver'; -var major = VERSION.split('.')[0]; -var GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for("opentelemetry.js.api." + major); -var _global = _globalThis; -export function registerGlobal(type, instance, diag, allowOverride) { - var _a; - if (allowOverride === void 0) { allowOverride = false; } - var api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? 
_a : { - version: VERSION, - }); - if (!allowOverride && api[type]) { - // already registered an API of this type - var err = new Error("@opentelemetry/api: Attempted duplicate registration of API: " + type); - diag.error(err.stack || err.message); - return false; - } - if (api.version !== VERSION) { - // All registered APIs must be of the same version exactly - var err = new Error("@opentelemetry/api: Registration of version v" + api.version + " for " + type + " does not match previously registered API v" + VERSION); - diag.error(err.stack || err.message); - return false; - } - api[type] = instance; - diag.debug("@opentelemetry/api: Registered a global for " + type + " v" + VERSION + "."); - return true; -} -export function getGlobal(type) { - var _a, _b; - var globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; - if (!globalVersion || !isCompatible(globalVersion)) { - return; - } - return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; -} -export function unregisterGlobal(type, diag) { - diag.debug("@opentelemetry/api: Unregistering a global for " + type + " v" + VERSION + "."); - var api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; - if (api) { - delete api[type]; - } -} -//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/internal/semver.js b/node_modules/@opentelemetry/api/build/esm/internal/semver.js deleted file mode 100644 index 2a788a0b8..000000000 --- a/node_modules/@opentelemetry/api/build/esm/internal/semver.js +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { VERSION } from '../version'; -var re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; -/** - * Create a function to test an API version to see if it is compatible with the provided ownVersion. - * - * The returned function has the following semantics: - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param ownVersion version which should be checked against - */ -export function _makeCompatibilityCheck(ownVersion) { - var acceptedVersions = new Set([ownVersion]); - var rejectedVersions = new Set(); - var myVersionMatch = ownVersion.match(re); - if (!myVersionMatch) { - // we cannot guarantee compatibility so we always return noop - return function () { return false; }; - } - var ownVersionParsed = { - major: +myVersionMatch[1], - minor: +myVersionMatch[2], - patch: +myVersionMatch[3], - prerelease: myVersionMatch[4], - }; - // if ownVersion has a prerelease tag, versions must match exactly - if 
(ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; - } - function _reject(v) { - rejectedVersions.add(v); - return false; - } - function _accept(v) { - acceptedVersions.add(v); - return true; - } - return function isCompatible(globalVersion) { - if (acceptedVersions.has(globalVersion)) { - return true; - } - if (rejectedVersions.has(globalVersion)) { - return false; - } - var globalVersionMatch = globalVersion.match(re); - if (!globalVersionMatch) { - // cannot parse other version - // we cannot guarantee compatibility so we always noop - return _reject(globalVersion); - } - var globalVersionParsed = { - major: +globalVersionMatch[1], - minor: +globalVersionMatch[2], - patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], - }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); - } - // major versions must match - if (ownVersionParsed.major !== globalVersionParsed.major) { - return _reject(globalVersion); - } - if (ownVersionParsed.major === 0) { - if (ownVersionParsed.minor === globalVersionParsed.minor && - ownVersionParsed.patch <= globalVersionParsed.patch) { - return _accept(globalVersion); - } - return _reject(globalVersion); - } - if (ownVersionParsed.minor <= globalVersionParsed.minor) { - return _accept(globalVersion); - } - return _reject(globalVersion); - }; -} -/** - * Test an API version to see if it is compatible with this API. 
- * - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param version version of the API requesting an instance of the global API - */ -export var isCompatible = _makeCompatibilityCheck(VERSION); -//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics-api.js b/node_modules/@opentelemetry/api/build/esm/metrics-api.js deleted file mode 100644 index 145087f4f..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { MetricsAPI } from './api/metrics'; -/** Entrypoint for metrics API */ -export var metrics = MetricsAPI.getInstance(); -//# sourceMappingURL=metrics-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js b/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js deleted file mode 100644 index f1d0754d5..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics/Meter.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Meter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js b/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js deleted file mode 100644 index 3051712f0..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics/MeterProvider.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=MeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js b/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js deleted file mode 100644 index 6df137415..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics/Metric.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** The Type of value. It describes how the data is reported. */ -export var ValueType; -(function (ValueType) { - ValueType[ValueType["INT"] = 0] = "INT"; - ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; -})(ValueType || (ValueType = {})); -//# sourceMappingURL=Metric.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js deleted file mode 100644 index a96fbe24f..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeter.js +++ /dev/null @@ -1,165 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __extends = (this && this.__extends) || (function () { - var extendStatics = function (d, b) { - extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - return extendStatics(d, b); - }; - return function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; -})(); -/** - * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses - * constant NoopMetrics for all of its methods. 
- */ -var NoopMeter = /** @class */ (function () { - function NoopMeter() { - } - /** - * @see {@link Meter.createHistogram} - */ - NoopMeter.prototype.createHistogram = function (_name, _options) { - return NOOP_HISTOGRAM_METRIC; - }; - /** - * @see {@link Meter.createCounter} - */ - NoopMeter.prototype.createCounter = function (_name, _options) { - return NOOP_COUNTER_METRIC; - }; - /** - * @see {@link Meter.createUpDownCounter} - */ - NoopMeter.prototype.createUpDownCounter = function (_name, _options) { - return NOOP_UP_DOWN_COUNTER_METRIC; - }; - /** - * @see {@link Meter.createObservableGauge} - */ - NoopMeter.prototype.createObservableGauge = function (_name, _options) { - return NOOP_OBSERVABLE_GAUGE_METRIC; - }; - /** - * @see {@link Meter.createObservableCounter} - */ - NoopMeter.prototype.createObservableCounter = function (_name, _options) { - return NOOP_OBSERVABLE_COUNTER_METRIC; - }; - /** - * @see {@link Meter.createObservableUpDownCounter} - */ - NoopMeter.prototype.createObservableUpDownCounter = function (_name, _options) { - return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; - }; - /** - * @see {@link Meter.addBatchObservableCallback} - */ - NoopMeter.prototype.addBatchObservableCallback = function (_callback, _observables) { }; - /** - * @see {@link Meter.removeBatchObservableCallback} - */ - NoopMeter.prototype.removeBatchObservableCallback = function (_callback) { }; - return NoopMeter; -}()); -export { NoopMeter }; -var NoopMetric = /** @class */ (function () { - function NoopMetric() { - } - return NoopMetric; -}()); -export { NoopMetric }; -var NoopCounterMetric = /** @class */ (function (_super) { - __extends(NoopCounterMetric, _super); - function NoopCounterMetric() { - return _super !== null && _super.apply(this, arguments) || this; - } - NoopCounterMetric.prototype.add = function (_value, _attributes) { }; - return NoopCounterMetric; -}(NoopMetric)); -export { NoopCounterMetric }; -var NoopUpDownCounterMetric = /** @class */ (function 
(_super) { - __extends(NoopUpDownCounterMetric, _super); - function NoopUpDownCounterMetric() { - return _super !== null && _super.apply(this, arguments) || this; - } - NoopUpDownCounterMetric.prototype.add = function (_value, _attributes) { }; - return NoopUpDownCounterMetric; -}(NoopMetric)); -export { NoopUpDownCounterMetric }; -var NoopHistogramMetric = /** @class */ (function (_super) { - __extends(NoopHistogramMetric, _super); - function NoopHistogramMetric() { - return _super !== null && _super.apply(this, arguments) || this; - } - NoopHistogramMetric.prototype.record = function (_value, _attributes) { }; - return NoopHistogramMetric; -}(NoopMetric)); -export { NoopHistogramMetric }; -var NoopObservableMetric = /** @class */ (function () { - function NoopObservableMetric() { - } - NoopObservableMetric.prototype.addCallback = function (_callback) { }; - NoopObservableMetric.prototype.removeCallback = function (_callback) { }; - return NoopObservableMetric; -}()); -export { NoopObservableMetric }; -var NoopObservableCounterMetric = /** @class */ (function (_super) { - __extends(NoopObservableCounterMetric, _super); - function NoopObservableCounterMetric() { - return _super !== null && _super.apply(this, arguments) || this; - } - return NoopObservableCounterMetric; -}(NoopObservableMetric)); -export { NoopObservableCounterMetric }; -var NoopObservableGaugeMetric = /** @class */ (function (_super) { - __extends(NoopObservableGaugeMetric, _super); - function NoopObservableGaugeMetric() { - return _super !== null && _super.apply(this, arguments) || this; - } - return NoopObservableGaugeMetric; -}(NoopObservableMetric)); -export { NoopObservableGaugeMetric }; -var NoopObservableUpDownCounterMetric = /** @class */ (function (_super) { - __extends(NoopObservableUpDownCounterMetric, _super); - function NoopObservableUpDownCounterMetric() { - return _super !== null && _super.apply(this, arguments) || this; - } - return NoopObservableUpDownCounterMetric; 
-}(NoopObservableMetric)); -export { NoopObservableUpDownCounterMetric }; -export var NOOP_METER = new NoopMeter(); -// Synchronous instruments -export var NOOP_COUNTER_METRIC = new NoopCounterMetric(); -export var NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); -export var NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); -// Asynchronous instruments -export var NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); -export var NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); -export var NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); -/** - * Create a no-op Meter - */ -export function createNoopMeter() { - return NOOP_METER; -} -//# sourceMappingURL=NoopMeter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js b/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js deleted file mode 100644 index 75de3c02b..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics/NoopMeterProvider.js +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { NOOP_METER } from './NoopMeter'; -/** - * An implementation of the {@link MeterProvider} which returns an impotent Meter - * for all calls to `getMeter` - */ -var NoopMeterProvider = /** @class */ (function () { - function NoopMeterProvider() { - } - NoopMeterProvider.prototype.getMeter = function (_name, _version, _options) { - return NOOP_METER; - }; - return NoopMeterProvider; -}()); -export { NoopMeterProvider }; -export var NOOP_METER_PROVIDER = new NoopMeterProvider(); -//# sourceMappingURL=NoopMeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js b/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js deleted file mode 100644 index 7985d26dd..000000000 --- a/node_modules/@opentelemetry/api/build/esm/metrics/ObservableResult.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=ObservableResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js deleted file mode 100644 index 52f985e20..000000000 --- a/node_modules/@opentelemetry/api/build/esm/platform/browser/globalThis.js +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Updates to this file should also be replicated to @opentelemetry/core too. -/** - * - globalThis (New standard) - * - self (Will return the current window instance for supported browsers) - * - window (fallback for older browser implementations) - * - global (NodeJS implementation) - * - (When all else fails) - */ -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef -export var _globalThis = typeof globalThis === 'object' - ? globalThis - : typeof self === 'object' - ? self - : typeof window === 'object' - ? window - : typeof global === 'object' - ? 
global - : {}; -//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js b/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js deleted file mode 100644 index efcad2e9c..000000000 --- a/node_modules/@opentelemetry/api/build/esm/platform/browser/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export * from './globalThis'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/index.js b/node_modules/@opentelemetry/api/build/esm/platform/index.js deleted file mode 100644 index c0df125ca..000000000 --- a/node_modules/@opentelemetry/api/build/esm/platform/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export * from './node'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js deleted file mode 100644 index feb970007..000000000 --- a/node_modules/@opentelemetry/api/build/esm/platform/node/globalThis.js +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins -export var _globalThis = typeof globalThis === 'object' ? globalThis : global; -//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js b/node_modules/@opentelemetry/api/build/esm/platform/node/index.js deleted file mode 100644 index efcad2e9c..000000000 --- a/node_modules/@opentelemetry/api/build/esm/platform/node/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export * from './globalThis'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation-api.js b/node_modules/@opentelemetry/api/build/esm/propagation-api.js deleted file mode 100644 index df8d5b9d7..000000000 --- a/node_modules/@opentelemetry/api/build/esm/propagation-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { PropagationAPI } from './api/propagation'; -/** Entrypoint for propagation API */ -export var propagation = PropagationAPI.getInstance(); -//# sourceMappingURL=propagation-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js deleted file mode 100644 index 8e6290120..000000000 --- a/node_modules/@opentelemetry/api/build/esm/propagation/NoopTextMapPropagator.js +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * No-op implementations of {@link TextMapPropagator}. 
- */ -var NoopTextMapPropagator = /** @class */ (function () { - function NoopTextMapPropagator() { - } - /** Noop inject function does nothing */ - NoopTextMapPropagator.prototype.inject = function (_context, _carrier) { }; - /** Noop extract function does nothing and returns the input context */ - NoopTextMapPropagator.prototype.extract = function (context, _carrier) { - return context; - }; - NoopTextMapPropagator.prototype.fields = function () { - return []; - }; - return NoopTextMapPropagator; -}()); -export { NoopTextMapPropagator }; -//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js deleted file mode 100644 index c5f5311fb..000000000 --- a/node_modules/@opentelemetry/api/build/esm/propagation/TextMapPropagator.js +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export var defaultTextMapGetter = { - get: function (carrier, key) { - if (carrier == null) { - return undefined; - } - return carrier[key]; - }, - keys: function (carrier) { - if (carrier == null) { - return []; - } - return Object.keys(carrier); - }, -}; -export var defaultTextMapSetter = { - set: function (carrier, key, value) { - if (carrier == null) { - return; - } - carrier[key] = value; - }, -}; -//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace-api.js b/node_modules/@opentelemetry/api/build/esm/trace-api.js deleted file mode 100644 index 57506e51a..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { TraceAPI } from './api/trace'; -/** Entrypoint for trace API */ -export var trace = TraceAPI.getInstance(); -//# sourceMappingURL=trace-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js deleted file mode 100644 index ba2e0ff5c..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/NonRecordingSpan.js +++ /dev/null @@ -1,62 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { INVALID_SPAN_CONTEXT } from './invalid-span-constants'; -/** - * The NonRecordingSpan is the default {@link Span} that is used when no Span - * implementation is available. All operations are no-op including context - * propagation. - */ -var NonRecordingSpan = /** @class */ (function () { - function NonRecordingSpan(_spanContext) { - if (_spanContext === void 0) { _spanContext = INVALID_SPAN_CONTEXT; } - this._spanContext = _spanContext; - } - // Returns a SpanContext. 
- NonRecordingSpan.prototype.spanContext = function () { - return this._spanContext; - }; - // By default does nothing - NonRecordingSpan.prototype.setAttribute = function (_key, _value) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.setAttributes = function (_attributes) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.addEvent = function (_name, _attributes) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.setStatus = function (_status) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.updateName = function (_name) { - return this; - }; - // By default does nothing - NonRecordingSpan.prototype.end = function (_endTime) { }; - // isRecording always returns false for NonRecordingSpan. - NonRecordingSpan.prototype.isRecording = function () { - return false; - }; - // By default does nothing - NonRecordingSpan.prototype.recordException = function (_exception, _time) { }; - return NonRecordingSpan; -}()); -export { NonRecordingSpan }; -//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js deleted file mode 100644 index 5a3a96957..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracer.js +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { ContextAPI } from '../api/context'; -import { getSpanContext, setSpan } from '../trace/context-utils'; -import { NonRecordingSpan } from './NonRecordingSpan'; -import { isSpanContextValid } from './spancontext-utils'; -var contextApi = ContextAPI.getInstance(); -/** - * No-op implementations of {@link Tracer}. - */ -var NoopTracer = /** @class */ (function () { - function NoopTracer() { - } - // startSpan starts a noop span. - NoopTracer.prototype.startSpan = function (name, options, context) { - if (context === void 0) { context = contextApi.active(); } - var root = Boolean(options === null || options === void 0 ? void 0 : options.root); - if (root) { - return new NonRecordingSpan(); - } - var parentFromContext = context && getSpanContext(context); - if (isSpanContext(parentFromContext) && - isSpanContextValid(parentFromContext)) { - return new NonRecordingSpan(parentFromContext); - } - else { - return new NonRecordingSpan(); - } - }; - NoopTracer.prototype.startActiveSpan = function (name, arg2, arg3, arg4) { - var opts; - var ctx; - var fn; - if (arguments.length < 2) { - return; - } - else if (arguments.length === 2) { - fn = arg2; - } - else if (arguments.length === 3) { - opts = arg2; - fn = arg3; - } - else { - opts = arg2; - ctx = arg3; - fn = arg4; - } - var parentContext = ctx !== null && ctx !== void 0 ? 
ctx : contextApi.active(); - var span = this.startSpan(name, opts, parentContext); - var contextWithSpanSet = setSpan(parentContext, span); - return contextApi.with(contextWithSpanSet, fn, undefined, span); - }; - return NoopTracer; -}()); -export { NoopTracer }; -function isSpanContext(spanContext) { - return (typeof spanContext === 'object' && - typeof spanContext['spanId'] === 'string' && - typeof spanContext['traceId'] === 'string' && - typeof spanContext['traceFlags'] === 'number'); -} -//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js deleted file mode 100644 index 14d44c22b..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/NoopTracerProvider.js +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { NoopTracer } from './NoopTracer'; -/** - * An implementation of the {@link TracerProvider} which returns an impotent - * Tracer for all calls to `getTracer`. - * - * All operations are no-op. 
- */ -var NoopTracerProvider = /** @class */ (function () { - function NoopTracerProvider() { - } - NoopTracerProvider.prototype.getTracer = function (_name, _version, _options) { - return new NoopTracer(); - }; - return NoopTracerProvider; -}()); -export { NoopTracerProvider }; -//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js deleted file mode 100644 index 341991b9e..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracer.js +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { NoopTracer } from './NoopTracer'; -var NOOP_TRACER = new NoopTracer(); -/** - * Proxy tracer provided by the proxy tracer provider - */ -var ProxyTracer = /** @class */ (function () { - function ProxyTracer(_provider, name, version, options) { - this._provider = _provider; - this.name = name; - this.version = version; - this.options = options; - } - ProxyTracer.prototype.startSpan = function (name, options, context) { - return this._getTracer().startSpan(name, options, context); - }; - ProxyTracer.prototype.startActiveSpan = function (_name, _options, _context, _fn) { - var tracer = this._getTracer(); - return Reflect.apply(tracer.startActiveSpan, tracer, arguments); - }; - /** - * Try to get a tracer from the proxy tracer provider. - * If the proxy tracer provider has no delegate, return a noop tracer. - */ - ProxyTracer.prototype._getTracer = function () { - if (this._delegate) { - return this._delegate; - } - var tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); - if (!tracer) { - return NOOP_TRACER; - } - this._delegate = tracer; - return this._delegate; - }; - return ProxyTracer; -}()); -export { ProxyTracer }; -//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js deleted file mode 100644 index 3cc735c90..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/ProxyTracerProvider.js +++ /dev/null @@ -1,54 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { ProxyTracer } from './ProxyTracer'; -import { NoopTracerProvider } from './NoopTracerProvider'; -var NOOP_TRACER_PROVIDER = new NoopTracerProvider(); -/** - * Tracer provider which provides {@link ProxyTracer}s. - * - * Before a delegate is set, tracers provided are NoOp. - * When a delegate is set, traces are provided from the delegate. - * When a delegate is set after tracers have already been provided, - * all tracers already provided will use the provided delegate implementation. - */ -var ProxyTracerProvider = /** @class */ (function () { - function ProxyTracerProvider() { - } - /** - * Get a {@link ProxyTracer} - */ - ProxyTracerProvider.prototype.getTracer = function (name, version, options) { - var _a; - return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version, options)); - }; - ProxyTracerProvider.prototype.getDelegate = function () { - var _a; - return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; - }; - /** - * Set the delegate tracer provider - */ - ProxyTracerProvider.prototype.setDelegate = function (delegate) { - this._delegate = delegate; - }; - ProxyTracerProvider.prototype.getDelegateTracer = function (name, version, options) { - var _a; - return (_a = this._delegate) === null || _a === void 0 ? 
void 0 : _a.getTracer(name, version, options); - }; - return ProxyTracerProvider; -}()); -export { ProxyTracerProvider }; -//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js b/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js deleted file mode 100644 index 22a60a127..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/Sampler.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js deleted file mode 100644 index be657416b..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/SamplingResult.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. - * A sampling decision that determines how a {@link Span} will be recorded - * and collected. - */ -export var SamplingDecision; -(function (SamplingDecision) { - /** - * `Span.isRecording() === false`, span will not be recorded and all events - * and attributes will be dropped. - */ - SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; - /** - * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} - * MUST NOT be set. - */ - SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; - /** - * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} - * MUST be set. - */ - SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; -})(SamplingDecision || (SamplingDecision = {})); -//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js deleted file mode 100644 index 06b42b151..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/SpanOptions.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js b/node_modules/@opentelemetry/api/build/esm/trace/attributes.js deleted file mode 100644 index 6f1b9a3f8..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/attributes.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js deleted file mode 100644 index 4d776c380..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/context-utils.js +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { createContextKey } from '../context/context'; -import { NonRecordingSpan } from './NonRecordingSpan'; -import { ContextAPI } from '../api/context'; -/** - * span key - */ -var SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); -/** - * Return the span if one exists - * - * @param context context to get span from - */ -export function getSpan(context) { - return context.getValue(SPAN_KEY) || undefined; -} -/** - * Gets the span from the current context, if one exists. - */ -export function getActiveSpan() { - return getSpan(ContextAPI.getInstance().active()); -} -/** - * Set the span on a context - * - * @param context context to use as parent - * @param span span to set active - */ -export function setSpan(context, span) { - return context.setValue(SPAN_KEY, span); -} -/** - * Remove current span stored in the context - * - * @param context context to delete span from - */ -export function deleteSpan(context) { - return context.deleteValue(SPAN_KEY); -} -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context context to set active span on - * @param spanContext span context to be wrapped - */ -export function setSpanContext(context, spanContext) { - return setSpan(context, new NonRecordingSpan(spanContext)); -} -/** - * Get the span context of the span if it exists. - * - * @param context context to get values from - */ -export function getSpanContext(context) { - var _a; - return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); -} -//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js deleted file mode 100644 index 751406935..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-impl.js +++ /dev/null @@ -1,102 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { validateKey, validateValue } from './tracestate-validators'; -var MAX_TRACE_STATE_ITEMS = 32; -var MAX_TRACE_STATE_LEN = 512; -var LIST_MEMBERS_SEPARATOR = ','; -var LIST_MEMBER_KEY_VALUE_SPLITTER = '='; -/** - * TraceState must be a class and not a simple object type because of the spec - * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). - * - * Here is the list of allowed mutations: - * - New key-value pair should be added into the beginning of the list - * - The value of any key can be updated. Modified keys MUST be moved to the - * beginning of the list. - */ -var TraceStateImpl = /** @class */ (function () { - function TraceStateImpl(rawTraceState) { - this._internalState = new Map(); - if (rawTraceState) - this._parse(rawTraceState); - } - TraceStateImpl.prototype.set = function (key, value) { - // TODO: Benchmark the different approaches(map vs list) and - // use the faster one. 
- var traceState = this._clone(); - if (traceState._internalState.has(key)) { - traceState._internalState.delete(key); - } - traceState._internalState.set(key, value); - return traceState; - }; - TraceStateImpl.prototype.unset = function (key) { - var traceState = this._clone(); - traceState._internalState.delete(key); - return traceState; - }; - TraceStateImpl.prototype.get = function (key) { - return this._internalState.get(key); - }; - TraceStateImpl.prototype.serialize = function () { - var _this = this; - return this._keys() - .reduce(function (agg, key) { - agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + _this.get(key)); - return agg; - }, []) - .join(LIST_MEMBERS_SEPARATOR); - }; - TraceStateImpl.prototype._parse = function (rawTraceState) { - if (rawTraceState.length > MAX_TRACE_STATE_LEN) - return; - this._internalState = rawTraceState - .split(LIST_MEMBERS_SEPARATOR) - .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning - .reduce(function (agg, part) { - var listMember = part.trim(); // Optional Whitespace (OWS) handling - var i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); - if (i !== -1) { - var key = listMember.slice(0, i); - var value = listMember.slice(i + 1, part.length); - if (validateKey(key) && validateValue(value)) { - agg.set(key, value); - } - else { - // TODO: Consider to add warning log - } - } - return agg; - }, new Map()); - // Because of the reverse() requirement, trunc must be done after map is created - if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { - this._internalState = new Map(Array.from(this._internalState.entries()) - .reverse() // Use reverse same as original tracestate parse chain - .slice(0, MAX_TRACE_STATE_ITEMS)); - } - }; - TraceStateImpl.prototype._keys = function () { - return Array.from(this._internalState.keys()).reverse(); - }; - TraceStateImpl.prototype._clone = function () { - var traceState = new TraceStateImpl(); - traceState._internalState = new 
Map(this._internalState); - return traceState; - }; - return TraceStateImpl; -}()); -export { TraceStateImpl }; -//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js deleted file mode 100644 index 1d3f14bc5..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/internal/tracestate-validators.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; -var VALID_KEY = "[a-z]" + VALID_KEY_CHAR_RANGE + "{0,255}"; -var VALID_VENDOR_KEY = "[a-z0-9]" + VALID_KEY_CHAR_RANGE + "{0,240}@[a-z]" + VALID_KEY_CHAR_RANGE + "{0,13}"; -var VALID_KEY_REGEX = new RegExp("^(?:" + VALID_KEY + "|" + VALID_VENDOR_KEY + ")$"); -var VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; -var INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; -/** - * Key is opaque string up to 256 characters printable. It MUST begin with a - * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, - * underscores _, dashes -, asterisks *, and forward slashes /. - * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the - * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. 
- * see https://www.w3.org/TR/trace-context/#key - */ -export function validateKey(key) { - return VALID_KEY_REGEX.test(key); -} -/** - * Value is opaque string up to 256 characters printable ASCII RFC0020 - * characters (i.e., the range 0x20 to 0x7E) except comma , and =. - */ -export function validateValue(value) { - return (VALID_VALUE_BASE_REGEX.test(value) && - !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); -} -//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js deleted file mode 100644 index feea46914..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/internal/utils.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { TraceStateImpl } from './tracestate-impl'; -export function createTraceState(rawTraceState) { - return new TraceStateImpl(rawTraceState); -} -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js deleted file mode 100644 index 36dc1d62c..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/invalid-span-constants.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { TraceFlags } from './trace_flags'; -export var INVALID_SPANID = '0000000000000000'; -export var INVALID_TRACEID = '00000000000000000000000000000000'; -export var INVALID_SPAN_CONTEXT = { - traceId: INVALID_TRACEID, - spanId: INVALID_SPANID, - traceFlags: TraceFlags.NONE, -}; -//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/link.js b/node_modules/@opentelemetry/api/build/esm/trace/link.js deleted file mode 100644 index 7c8accbe1..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/link.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span.js b/node_modules/@opentelemetry/api/build/esm/trace/span.js deleted file mode 100644 index f41c7f6f5..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/span.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js b/node_modules/@opentelemetry/api/build/esm/trace/span_context.js deleted file mode 100644 index 1bb88b0d7..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/span_context.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js b/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js deleted file mode 100644 index 1119df92f..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/span_kind.js +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export var SpanKind; -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. 
Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(SpanKind || (SpanKind = {})); -//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js deleted file mode 100644 index 88545bb58..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/spancontext-utils.js +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants'; -import { NonRecordingSpan } from './NonRecordingSpan'; -var VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; -var VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; -export function isValidTraceId(traceId) { - return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID; -} -export function isValidSpanId(spanId) { - return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID; -} -/** - * Returns true if this {@link SpanContext} is valid. 
- * @return true if this {@link SpanContext} is valid. - */ -export function isSpanContextValid(spanContext) { - return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); -} -/** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context - */ -export function wrapSpanContext(spanContext) { - return new NonRecordingSpan(spanContext); -} -//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/status.js b/node_modules/@opentelemetry/api/build/esm/trace/status.js deleted file mode 100644 index 5ee55e42f..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/status.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * An enumeration of status codes. - */ -export var SpanStatusCode; -(function (SpanStatusCode) { - /** - * The default status. - */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. - */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(SpanStatusCode || (SpanStatusCode = {})); -//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js deleted file mode 100644 index 8a7b00072..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/trace_flags.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export var TraceFlags; -(function (TraceFlags) { - /** Represents no flag set. */ - TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; - /** Bit to represent whether trace is sampled in trace flags. */ - TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; -})(TraceFlags || (TraceFlags = {})); -//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js b/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js deleted file mode 100644 index a6c368f88..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/trace_state.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer.js deleted file mode 100644 index ad066dc3f..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/tracer.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js deleted file mode 100644 index 470a3a732..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/tracer_options.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js deleted file mode 100644 index adf432a65..000000000 --- a/node_modules/@opentelemetry/api/build/esm/trace/tracer_provider.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esm/version.js b/node_modules/@opentelemetry/api/build/esm/version.js deleted file mode 100644 index ba175da37..000000000 --- a/node_modules/@opentelemetry/api/build/esm/version.js +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -// this is autogenerated file, see scripts/version-update.js -export var VERSION = '1.8.0'; -//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/context.js b/node_modules/@opentelemetry/api/build/esnext/api/context.js deleted file mode 100644 index c672a419f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/api/context.js +++ /dev/null @@ -1,77 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { NoopContextManager } from '../context/NoopContextManager'; -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { DiagAPI } from './diag'; -const API_NAME = 'context'; -const NOOP_CONTEXT_MANAGER = new NoopContextManager(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Context API - */ -export class ContextAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Context API */ - static getInstance() { - if (!this._instance) { - this._instance = new ContextAPI(); - } - return this._instance; - } - /** - * Set the current context manager. 
- * - * @returns true if the context manager was successfully registered, else false - */ - setGlobalContextManager(contextManager) { - return registerGlobal(API_NAME, contextManager, DiagAPI.instance()); - } - /** - * Get the currently active context - */ - active() { - return this._getContextManager().active(); - } - /** - * Execute a function with an active context - * - * @param context context to be active during function execution - * @param fn function to execute in a context - * @param thisArg optional receiver to be used for calling fn - * @param args optional arguments forwarded to fn - */ - with(context, fn, thisArg, ...args) { - return this._getContextManager().with(context, fn, thisArg, ...args); - } - /** - * Bind a context to a target function or event emitter - * - * @param context context to bind to the event emitter or function. Defaults to the currently active context - * @param target function or event emitter to bind - */ - bind(context, target) { - return this._getContextManager().bind(context, target); - } - _getContextManager() { - return getGlobal(API_NAME) || NOOP_CONTEXT_MANAGER; - } - /** Disable and remove the global context manager */ - disable() { - this._getContextManager().disable(); - unregisterGlobal(API_NAME, DiagAPI.instance()); - } -} -//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/diag.js b/node_modules/@opentelemetry/api/build/esnext/api/diag.js deleted file mode 100644 index 8798a39f4..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/api/diag.js +++ /dev/null @@ -1,89 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { DiagComponentLogger } from '../diag/ComponentLogger'; -import { createLogLevelDiagLogger } from '../diag/internal/logLevelLogger'; -import { DiagLogLevel, } from '../diag/types'; -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -const API_NAME = 'diag'; -/** - * Singleton object which represents the entry point to the OpenTelemetry internal - * diagnostic API - */ -export class DiagAPI { - /** - * Private internal constructor - * @private - */ - constructor() { - function _logProxy(funcName) { - return function (...args) { - const logger = getGlobal('diag'); - // shortcut if logger not set - if (!logger) - return; - return logger[funcName](...args); - }; - } - // Using self local variable for minification purposes as 'this' cannot be minified - const self = this; - // DiagAPI specific functions - const setLogger = (logger, optionsOrLogLevel = { logLevel: DiagLogLevel.INFO }) => { - var _a, _b, _c; - if (logger === self) { - // There isn't much we can do here. - // Logging to the console might break the user application. - // Try to log to self. If a logger was previously registered it will receive the log. - const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); - self.error((_a = err.stack) !== null && _a !== void 0 ? 
_a : err.message); - return false; - } - if (typeof optionsOrLogLevel === 'number') { - optionsOrLogLevel = { - logLevel: optionsOrLogLevel, - }; - } - const oldLogger = getGlobal('diag'); - const newLogger = createLogLevelDiagLogger((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : DiagLogLevel.INFO, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { - const stack = (_c = new Error().stack) !== null && _c !== void 0 ? _c : ''; - oldLogger.warn(`Current logger will be overwritten from ${stack}`); - newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); - } - return registerGlobal('diag', newLogger, self, true); - }; - self.setLogger = setLogger; - self.disable = () => { - unregisterGlobal(API_NAME, self); - }; - self.createComponentLogger = (options) => { - return new DiagComponentLogger(options); - }; - self.verbose = _logProxy('verbose'); - self.debug = _logProxy('debug'); - self.info = _logProxy('info'); - self.warn = _logProxy('warn'); - self.error = _logProxy('error'); - } - /** Get the singleton instance of the DiagAPI API */ - static instance() { - if (!this._instance) { - this._instance = new DiagAPI(); - } - return this._instance; - } -} -//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/metrics.js b/node_modules/@opentelemetry/api/build/esnext/api/metrics.js deleted file mode 100644 index 786492646..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/api/metrics.js +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { NOOP_METER_PROVIDER } from '../metrics/NoopMeterProvider'; -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { DiagAPI } from './diag'; -const API_NAME = 'metrics'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Metrics API - */ -export class MetricsAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Metrics API */ - static getInstance() { - if (!this._instance) { - this._instance = new MetricsAPI(); - } - return this._instance; - } - /** - * Set the current global meter provider. - * Returns true if the meter provider was successfully registered, else false. - */ - setGlobalMeterProvider(provider) { - return registerGlobal(API_NAME, provider, DiagAPI.instance()); - } - /** - * Returns the global meter provider. - */ - getMeterProvider() { - return getGlobal(API_NAME) || NOOP_METER_PROVIDER; - } - /** - * Returns a meter from the global meter provider. 
- */ - getMeter(name, version, options) { - return this.getMeterProvider().getMeter(name, version, options); - } - /** Remove the global meter provider */ - disable() { - unregisterGlobal(API_NAME, DiagAPI.instance()); - } -} -//# sourceMappingURL=metrics.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/propagation.js b/node_modules/@opentelemetry/api/build/esnext/api/propagation.js deleted file mode 100644 index 93507eb9f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/api/propagation.js +++ /dev/null @@ -1,85 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { NoopTextMapPropagator } from '../propagation/NoopTextMapPropagator'; -import { defaultTextMapGetter, defaultTextMapSetter, } from '../propagation/TextMapPropagator'; -import { getBaggage, getActiveBaggage, setBaggage, deleteBaggage, } from '../baggage/context-helpers'; -import { createBaggage } from '../baggage/utils'; -import { DiagAPI } from './diag'; -const API_NAME = 'propagation'; -const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Propagation API - */ -export class PropagationAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this.createBaggage = createBaggage; - this.getBaggage = getBaggage; - this.getActiveBaggage = getActiveBaggage; - this.setBaggage = setBaggage; - this.deleteBaggage = deleteBaggage; - } - /** Get the singleton instance of the Propagator API */ - static getInstance() { - if (!this._instance) { - this._instance = new PropagationAPI(); - } - return this._instance; - } - /** - * Set the current propagator. 
- * - * @returns true if the propagator was successfully registered, else false - */ - setGlobalPropagator(propagator) { - return registerGlobal(API_NAME, propagator, DiagAPI.instance()); - } - /** - * Inject context into a carrier to be propagated inter-process - * - * @param context Context carrying tracing data to inject - * @param carrier carrier to inject context into - * @param setter Function used to set values on the carrier - */ - inject(context, carrier, setter = defaultTextMapSetter) { - return this._getGlobalPropagator().inject(context, carrier, setter); - } - /** - * Extract context from a carrier - * - * @param context Context which the newly created context will inherit from - * @param carrier Carrier to extract context from - * @param getter Function used to extract keys from a carrier - */ - extract(context, carrier, getter = defaultTextMapGetter) { - return this._getGlobalPropagator().extract(context, carrier, getter); - } - /** - * Return a list of all fields which may be used by the propagator. - */ - fields() { - return this._getGlobalPropagator().fields(); - } - /** Remove the global propagator */ - disable() { - unregisterGlobal(API_NAME, DiagAPI.instance()); - } - _getGlobalPropagator() { - return getGlobal(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; - } -} -//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/api/trace.js b/node_modules/@opentelemetry/api/build/esnext/api/trace.js deleted file mode 100644 index 60036200b..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/api/trace.js +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { getGlobal, registerGlobal, unregisterGlobal, } from '../internal/global-utils'; -import { ProxyTracerProvider } from '../trace/ProxyTracerProvider'; -import { isSpanContextValid, wrapSpanContext, } from '../trace/spancontext-utils'; -import { deleteSpan, getActiveSpan, getSpan, getSpanContext, setSpan, setSpanContext, } from '../trace/context-utils'; -import { DiagAPI } from './diag'; -const API_NAME = 'trace'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Tracing API - */ -export class TraceAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this._proxyTracerProvider = new ProxyTracerProvider(); - this.wrapSpanContext = wrapSpanContext; - this.isSpanContextValid = isSpanContextValid; - this.deleteSpan = deleteSpan; - this.getSpan = getSpan; - this.getActiveSpan = getActiveSpan; - this.getSpanContext = getSpanContext; - this.setSpan = setSpan; - this.setSpanContext = setSpanContext; - } - /** Get the singleton instance of the Trace API */ - static getInstance() { - if (!this._instance) { - this._instance = new TraceAPI(); - } - return this._instance; - } - /** - * Set the current global tracer. 
- * - * @returns true if the tracer provider was successfully registered, else false - */ - setGlobalTracerProvider(provider) { - const success = registerGlobal(API_NAME, this._proxyTracerProvider, DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; - } - /** - * Returns the global tracer provider. - */ - getTracerProvider() { - return getGlobal(API_NAME) || this._proxyTracerProvider; - } - /** - * Returns a tracer from the global tracer provider. - */ - getTracer(name, version) { - return this.getTracerProvider().getTracer(name, version); - } - /** Remove the global tracer provider */ - disable() { - unregisterGlobal(API_NAME, DiagAPI.instance()); - this._proxyTracerProvider = new ProxyTracerProvider(); - } -} -//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js deleted file mode 100644 index 9cd0fe7ca..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/baggage/context-helpers.js +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { ContextAPI } from '../api/context'; -import { createContextKey } from '../context/context'; -/** - * Baggage key - */ -const BAGGAGE_KEY = createContextKey('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -export function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -/** - * Retrieve the current baggage from the active/current context - * - * @returns {Baggage} Extracted baggage from the context - */ -export function getActiveBaggage() { - return getBaggage(ContextAPI.getInstance().active()); -} -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -export function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -export function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js deleted file mode 100644 index 774d1f8d9..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/baggage-impl.js +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export class BaggageImpl { - constructor(entries) { - this._entries = entries ? new Map(entries) : new Map(); - } - getEntry(key) { - const entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); - } - getAllEntries() { - return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); - } - setEntry(key, entry) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; - } - removeEntry(key) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; - } - removeEntries(...keys) { - const newBaggage = new BaggageImpl(this._entries); - for (const key of keys) { - newBaggage._entries.delete(key); - } - return newBaggage; - } - clear() { - return new BaggageImpl(); - } -} -//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js deleted file mode 100644 index 22f5b25dd..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/baggage/internal/symbol.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * Symbol used to make BaggageEntryMetadata an opaque type - */ -export const baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); -//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/types.js b/node_modules/@opentelemetry/api/build/esnext/baggage/types.js deleted file mode 100644 index 928faad02..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/baggage/types.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js b/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js deleted file mode 100644 index b44488332..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/baggage/utils.js +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { DiagAPI } from '../api/diag'; -import { BaggageImpl } from './internal/baggage-impl'; -import { baggageEntryMetadataSymbol } from './internal/symbol'; -const diag = DiagAPI.instance(); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -export function createBaggage(entries = {}) { - return new BaggageImpl(new Map(Object.entries(entries))); -} -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
- * - */ -export function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); - str = ''; - } - return { - __TYPE__: baggageEntryMetadataSymbol, - toString() { - return str; - }, - }; -} -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js b/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js deleted file mode 100644 index dbb1e4977..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/common/Attributes.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Exception.js b/node_modules/@opentelemetry/api/build/esnext/common/Exception.js deleted file mode 100644 index 6522a8e65..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/common/Exception.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/common/Time.js b/node_modules/@opentelemetry/api/build/esnext/common/Time.js deleted file mode 100644 index 2abdf582f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/common/Time.js +++ /dev/null @@ -1,2 +0,0 @@ -export {}; -//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context-api.js b/node_modules/@opentelemetry/api/build/esnext/context-api.js deleted file mode 100644 index 582704332..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/context-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { ContextAPI } from './api/context'; -/** Entrypoint for context API */ -export const context = ContextAPI.getInstance(); -//# sourceMappingURL=context-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js deleted file mode 100644 index 14824a6b6..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/context/NoopContextManager.js +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { ROOT_CONTEXT } from './context'; -export class NoopContextManager { - active() { - return ROOT_CONTEXT; - } - with(_context, fn, thisArg, ...args) { - return fn.call(thisArg, ...args); - } - bind(_context, target) { - return target; - } - enable() { - return this; - } - disable() { - return this; - } -} -//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/context.js b/node_modules/@opentelemetry/api/build/esnext/context/context.js deleted file mode 100644 index a95ecf034..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/context/context.js +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** Get a key to uniquely identify a context value */ -export function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. - // - // Therefore, we use Symbol.for which returns the same key for the same input. - return Symbol.for(description); -} -class BaseContext { - /** - * Construct a new context which inherits values from an optional parent context. 
- * - * @param parentContext a context from which to inherit values - */ - constructor(parentContext) { - // for minification - const self = this; - self._currentContext = parentContext ? new Map(parentContext) : new Map(); - self.getValue = (key) => self._currentContext.get(key); - self.setValue = (key, value) => { - const context = new BaseContext(self._currentContext); - context._currentContext.set(key, value); - return context; - }; - self.deleteValue = (key) => { - const context = new BaseContext(self._currentContext); - context._currentContext.delete(key); - return context; - }; - } -} -/** The root context is used as the default parent context when there is no active context */ -export const ROOT_CONTEXT = new BaseContext(); -//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/context/types.js b/node_modules/@opentelemetry/api/build/esnext/context/types.js deleted file mode 100644 index 928faad02..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/context/types.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag-api.js b/node_modules/@opentelemetry/api/build/esnext/diag-api.js deleted file mode 100644 index 41d26584f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/diag-api.js +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -import { DiagAPI } from './api/diag'; -/** - * Entrypoint for Diag API. - * Defines Diagnostic handler used for internal diagnostic logging operations. - * The default provides a Noop DiagLogger implementation which may be changed via the - * diag.setLogger(logger: DiagLogger) function. - */ -export const diag = DiagAPI.instance(); -//# sourceMappingURL=diag-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js deleted file mode 100644 index 1e21dbee8..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/diag/ComponentLogger.js +++ /dev/null @@ -1,55 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { getGlobal } from '../internal/global-utils'; -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. - * It will then forward all message to global diag logger - * @example - * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); - * cLogger.debug('test'); - * // @opentelemetry/instrumentation-http test - */ -export class DiagComponentLogger { - constructor(props) { - this._namespace = props.namespace || 'DiagComponentLogger'; - } - debug(...args) { - return logProxy('debug', this._namespace, args); - } - error(...args) { - return logProxy('error', this._namespace, args); - } - info(...args) { - return logProxy('info', this._namespace, args); - } - warn(...args) { - return logProxy('warn', this._namespace, args); - } - verbose(...args) { - return logProxy('verbose', this._namespace, args); - } -} -function logProxy(funcName, namespace, args) { - const logger = getGlobal('diag'); - // shortcut if logger not set - if (!logger) { - return; - } - args.unshift(namespace); - return logger[funcName](...args); -} -//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js deleted file mode 100644 index d81ea30cb..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/diag/consoleLogger.js +++ /dev/null @@ -1,53 +0,0 @@ -/* - * 
Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -const consoleMap = [ - { n: 'error', c: 'error' }, - { n: 'warn', c: 'warn' }, - { n: 'info', c: 'info' }, - { n: 'debug', c: 'debug' }, - { n: 'verbose', c: 'trace' }, -]; -/** - * A simple Immutable Console based diagnostic logger which will output any messages to the Console. - * If you want to limit the amount of logging to a specific level or lower use the - * {@link createLogLevelDiagLogger} - */ -export class DiagConsoleLogger { - constructor() { - function _consoleFunc(funcName) { - return function (...args) { - if (console) { - // Some environments only expose the console when the F12 developer console is open - // eslint-disable-next-line no-console - let theFunc = console[funcName]; - if (typeof theFunc !== 'function') { - // Not all environments support all functions - // eslint-disable-next-line no-console - theFunc = console.log; - } - // One last final check - if (typeof theFunc === 'function') { - return theFunc.apply(console, args); - } - } - }; - } - for (let i = 0; i < consoleMap.length; i++) { - this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); - } - } -} -//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js deleted file mode 100644 index 6abf21b1d..000000000 
--- a/node_modules/@opentelemetry/api/build/esnext/diag/internal/logLevelLogger.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { DiagLogLevel } from '../types'; -export function createLogLevelDiagLogger(maxLevel, logger) { - if (maxLevel < DiagLogLevel.NONE) { - maxLevel = DiagLogLevel.NONE; - } - else if (maxLevel > DiagLogLevel.ALL) { - maxLevel = DiagLogLevel.ALL; - } - // In case the logger is null or undefined - logger = logger || {}; - function _filterFunc(funcName, theLevel) { - const theFunc = logger[funcName]; - if (typeof theFunc === 'function' && maxLevel >= theLevel) { - return theFunc.bind(logger); - } - return function () { }; - } - return { - error: _filterFunc('error', DiagLogLevel.ERROR), - warn: _filterFunc('warn', DiagLogLevel.WARN), - info: _filterFunc('info', DiagLogLevel.INFO), - debug: _filterFunc('debug', DiagLogLevel.DEBUG), - verbose: _filterFunc('verbose', DiagLogLevel.VERBOSE), - }; -} -//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js deleted file mode 100644 index 7d5ba63d8..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/diag/internal/noopLogger.js +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under 
the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -function noopLogFunction() { } -/** - * Returns a No-Op Diagnostic logger where all messages do nothing. - * @implements {@link DiagLogger} - * @returns {DiagLogger} - */ -export function createNoopDiagLogger() { - return { - verbose: noopLogFunction, - debug: noopLogFunction, - info: noopLogFunction, - warn: noopLogFunction, - error: noopLogFunction, - }; -} -//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/diag/types.js b/node_modules/@opentelemetry/api/build/esnext/diag/types.js deleted file mode 100644 index 306585e83..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/diag/types.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -/** - * Defines the available internal logging levels for the diagnostic logger, the numeric values - * of the levels are defined to match the original values from the initial LogLevel to avoid - * compatibility/migration issues for any implementation that assume the numeric ordering. - */ -export var DiagLogLevel; -(function (DiagLogLevel) { - /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ - DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; - /** Identifies an error scenario */ - DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; - /** Identifies a warning scenario */ - DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; - /** General informational log message */ - DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; - /** General debug log message */ - DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; - /** - * Detailed trace level logging should only be used for development, should only be set - * in a development environment. - */ - DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; - /** Used to set the logging level to include all logging */ - DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; -})(DiagLogLevel || (DiagLogLevel = {})); -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/index.js b/node_modules/@opentelemetry/api/build/esnext/experimental/index.js deleted file mode 100644 index 8400e49f5..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/experimental/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export { wrapTracer, SugaredTracer } from './trace/SugaredTracer'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js deleted file mode 100644 index 0c6a2bd4e..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredOptions.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=SugaredOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js b/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js deleted file mode 100644 index a1edc7f7e..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/experimental/trace/SugaredTracer.js +++ /dev/null @@ -1,88 +0,0 @@ -import { context, SpanStatusCode } from '../../'; -const defaultOnException = (e, span) => { - span.recordException(e); - span.setStatus({ - code: SpanStatusCode.ERROR, - }); -}; -/** - * return a new SugaredTracer created from the supplied one - * @param tracer - */ -export function wrapTracer(tracer) { - return new SugaredTracer(tracer); -} -export class SugaredTracer { - constructor(tracer) { - this._tracer = tracer; - this.startSpan = tracer.startSpan.bind(this._tracer); - this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer); - } - withActiveSpan(name, arg2, arg3, arg4) { - const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); - return this._tracer.startActiveSpan(name, opts, ctx, (span) => handleFn(span, opts, fn)); - } - withSpan(name, arg2, arg3, arg4) { - const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); - const span = this._tracer.startSpan(name, opts, ctx); - return handleFn(span, opts, fn); - } -} -/** - * Massages parameters of withSpan and withActiveSpan to allow signature overwrites - * @param arg - * @param arg2 - * @param arg3 - */ -function massageParams(arg, arg2, arg3) { - let opts; - let ctx; - let fn; - if (!arg2 && !arg3) { - fn = arg; - } - else if (!arg3) { - opts = arg; - fn = arg2; - } - else { - opts = arg; - ctx = arg2; - fn = arg3; - } - opts = opts !== null && opts !== void 0 ? opts : {}; - ctx = ctx !== null && ctx !== void 0 ? 
ctx : context.active(); - return { opts, ctx, fn }; -} -/** - * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling - * @param span - * @param opts - * @param fn - */ -function handleFn(span, opts, fn) { - var _a; - const onException = (_a = opts.onException) !== null && _a !== void 0 ? _a : defaultOnException; - const errorHandler = (e) => { - onException(e, span); - span.end(); - throw e; - }; - try { - const ret = fn(span); - // if fn is an async function, attach a recordException and spanEnd callback to the promise - if (typeof (ret === null || ret === void 0 ? void 0 : ret.then) === 'function') { - return ret.then(val => { - span.end(); - return val; - }, errorHandler); - } - span.end(); - return ret; - } - catch (e) { - // add throw to signal the compiler that this will throw in the inner scope - throw errorHandler(e); - } -} -//# sourceMappingURL=SugaredTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/index.js b/node_modules/@opentelemetry/api/build/esnext/index.js deleted file mode 100644 index 123dc4d48..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/index.js +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export { baggageEntryMetadataFromString } from './baggage/utils'; -// Context APIs -export { createContextKey, ROOT_CONTEXT } from './context/context'; -// Diag APIs -export { DiagConsoleLogger } from './diag/consoleLogger'; -export { DiagLogLevel, } from './diag/types'; -// Metrics APIs -export { createNoopMeter } from './metrics/NoopMeter'; -export { ValueType, } from './metrics/Metric'; -// Propagation APIs -export { defaultTextMapGetter, defaultTextMapSetter, } from './propagation/TextMapPropagator'; -export { ProxyTracer } from './trace/ProxyTracer'; -export { ProxyTracerProvider } from './trace/ProxyTracerProvider'; -export { SamplingDecision } from './trace/SamplingResult'; -export { SpanKind } from './trace/span_kind'; -export { SpanStatusCode } from './trace/status'; -export { TraceFlags } from './trace/trace_flags'; -export { createTraceState } from './trace/internal/utils'; -export { isSpanContextValid, isValidTraceId, isValidSpanId, } from './trace/spancontext-utils'; -export { INVALID_SPANID, INVALID_TRACEID, INVALID_SPAN_CONTEXT, } from './trace/invalid-span-constants'; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -import { context } from './context-api'; -import { diag } from './diag-api'; -import { metrics } from './metrics-api'; -import { propagation } from './propagation-api'; -import { trace } from './trace-api'; -// Named export. -export { context, diag, metrics, propagation, trace }; -// Default export. 
-export default { - context, - diag, - metrics, - propagation, - trace, -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js b/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js deleted file mode 100644 index acdd18528..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/internal/global-utils.js +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { _globalThis } from '../platform'; -import { VERSION } from '../version'; -import { isCompatible } from './semver'; -const major = VERSION.split('.')[0]; -const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); -const _global = _globalThis; -export function registerGlobal(type, instance, diag, allowOverride = false) { - var _a; - const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? 
_a : { - version: VERSION, - }); - if (!allowOverride && api[type]) { - // already registered an API of this type - const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); - diag.error(err.stack || err.message); - return false; - } - if (api.version !== VERSION) { - // All registered APIs must be of the same version exactly - const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${VERSION}`); - diag.error(err.stack || err.message); - return false; - } - api[type] = instance; - diag.debug(`@opentelemetry/api: Registered a global for ${type} v${VERSION}.`); - return true; -} -export function getGlobal(type) { - var _a, _b; - const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; - if (!globalVersion || !isCompatible(globalVersion)) { - return; - } - return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; -} -export function unregisterGlobal(type, diag) { - diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${VERSION}.`); - const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; - if (api) { - delete api[type]; - } -} -//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/internal/semver.js b/node_modules/@opentelemetry/api/build/esnext/internal/semver.js deleted file mode 100644 index 85e5980e5..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/internal/semver.js +++ /dev/null @@ -1,118 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { VERSION } from '../version'; -const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; -/** - * Create a function to test an API version to see if it is compatible with the provided ownVersion. - * - * The returned function has the following semantics: - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param ownVersion version which should be checked against - */ -export function _makeCompatibilityCheck(ownVersion) { - const acceptedVersions = new Set([ownVersion]); - const rejectedVersions = new Set(); - const myVersionMatch = ownVersion.match(re); - if (!myVersionMatch) { - // we cannot guarantee compatibility so we always return noop - return () => false; - } - const ownVersionParsed = { - major: +myVersionMatch[1], - minor: +myVersionMatch[2], - patch: +myVersionMatch[3], - prerelease: myVersionMatch[4], - }; - // if ownVersion has a prerelease tag, versions must match exactly - if 
(ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; - } - function _reject(v) { - rejectedVersions.add(v); - return false; - } - function _accept(v) { - acceptedVersions.add(v); - return true; - } - return function isCompatible(globalVersion) { - if (acceptedVersions.has(globalVersion)) { - return true; - } - if (rejectedVersions.has(globalVersion)) { - return false; - } - const globalVersionMatch = globalVersion.match(re); - if (!globalVersionMatch) { - // cannot parse other version - // we cannot guarantee compatibility so we always noop - return _reject(globalVersion); - } - const globalVersionParsed = { - major: +globalVersionMatch[1], - minor: +globalVersionMatch[2], - patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], - }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); - } - // major versions must match - if (ownVersionParsed.major !== globalVersionParsed.major) { - return _reject(globalVersion); - } - if (ownVersionParsed.major === 0) { - if (ownVersionParsed.minor === globalVersionParsed.minor && - ownVersionParsed.patch <= globalVersionParsed.patch) { - return _accept(globalVersion); - } - return _reject(globalVersion); - } - if (ownVersionParsed.minor <= globalVersionParsed.minor) { - return _accept(globalVersion); - } - return _reject(globalVersion); - }; -} -/** - * Test an API version to see if it is compatible with this API. 
- * - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param version version of the API requesting an instance of the global API - */ -export const isCompatible = _makeCompatibilityCheck(VERSION); -//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics-api.js b/node_modules/@opentelemetry/api/build/esnext/metrics-api.js deleted file mode 100644 index 624c88635..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { MetricsAPI } from './api/metrics'; -/** Entrypoint for metrics API */ -export const metrics = MetricsAPI.getInstance(); -//# sourceMappingURL=metrics-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js b/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js deleted file mode 100644 index f1d0754d5..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics/Meter.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Meter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js b/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js deleted file mode 100644 index 3051712f0..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics/MeterProvider.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=MeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js b/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js deleted file mode 100644 index 6df137415..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics/Metric.js +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** The Type of value. It describes how the data is reported. */ -export var ValueType; -(function (ValueType) { - ValueType[ValueType["INT"] = 0] = "INT"; - ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; -})(ValueType || (ValueType = {})); -//# sourceMappingURL=Metric.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js deleted file mode 100644 index 65659c0ab..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeter.js +++ /dev/null @@ -1,103 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses - * constant NoopMetrics for all of its methods. - */ -export class NoopMeter { - constructor() { } - /** - * @see {@link Meter.createHistogram} - */ - createHistogram(_name, _options) { - return NOOP_HISTOGRAM_METRIC; - } - /** - * @see {@link Meter.createCounter} - */ - createCounter(_name, _options) { - return NOOP_COUNTER_METRIC; - } - /** - * @see {@link Meter.createUpDownCounter} - */ - createUpDownCounter(_name, _options) { - return NOOP_UP_DOWN_COUNTER_METRIC; - } - /** - * @see {@link Meter.createObservableGauge} - */ - createObservableGauge(_name, _options) { - return NOOP_OBSERVABLE_GAUGE_METRIC; - } - /** - * @see {@link Meter.createObservableCounter} - */ - createObservableCounter(_name, _options) { - return NOOP_OBSERVABLE_COUNTER_METRIC; - } - /** - * @see {@link Meter.createObservableUpDownCounter} - */ - createObservableUpDownCounter(_name, _options) { - return NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; - } - /** - * @see {@link Meter.addBatchObservableCallback} - */ - addBatchObservableCallback(_callback, _observables) { } - /** - * @see {@link Meter.removeBatchObservableCallback} - */ - removeBatchObservableCallback(_callback) { } -} -export class NoopMetric { -} -export class NoopCounterMetric extends NoopMetric { - add(_value, _attributes) { } -} -export class NoopUpDownCounterMetric extends NoopMetric { - add(_value, _attributes) { } -} -export class NoopHistogramMetric extends NoopMetric { - record(_value, _attributes) { } -} -export 
class NoopObservableMetric { - addCallback(_callback) { } - removeCallback(_callback) { } -} -export class NoopObservableCounterMetric extends NoopObservableMetric { -} -export class NoopObservableGaugeMetric extends NoopObservableMetric { -} -export class NoopObservableUpDownCounterMetric extends NoopObservableMetric { -} -export const NOOP_METER = new NoopMeter(); -// Synchronous instruments -export const NOOP_COUNTER_METRIC = new NoopCounterMetric(); -export const NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); -export const NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); -// Asynchronous instruments -export const NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); -export const NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); -export const NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); -/** - * Create a no-op Meter - */ -export function createNoopMeter() { - return NOOP_METER; -} -//# sourceMappingURL=NoopMeter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js b/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js deleted file mode 100644 index 075f5c668..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics/NoopMeterProvider.js +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { NOOP_METER } from './NoopMeter'; -/** - * An implementation of the {@link MeterProvider} which returns an impotent Meter - * for all calls to `getMeter` - */ -export class NoopMeterProvider { - getMeter(_name, _version, _options) { - return NOOP_METER; - } -} -export const NOOP_METER_PROVIDER = new NoopMeterProvider(); -//# sourceMappingURL=NoopMeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js b/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js deleted file mode 100644 index 7985d26dd..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/metrics/ObservableResult.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=ObservableResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js deleted file mode 100644 index e9d58b3c5..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/platform/browser/globalThis.js +++ /dev/null @@ -1,35 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Updates to this file should also be replicated to @opentelemetry/core too. -/** - * - globalThis (New standard) - * - self (Will return the current window instance for supported browsers) - * - window (fallback for older browser implementations) - * - global (NodeJS implementation) - * - (When all else fails) - */ -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef -export const _globalThis = typeof globalThis === 'object' - ? globalThis - : typeof self === 'object' - ? self - : typeof window === 'object' - ? window - : typeof global === 'object' - ? global - : {}; -//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js b/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js deleted file mode 100644 index efcad2e9c..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/platform/browser/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -export * from './globalThis'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/index.js b/node_modules/@opentelemetry/api/build/esnext/platform/index.js deleted file mode 100644 index c0df125ca..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/platform/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export * from './node'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js deleted file mode 100644 index 7daa45d82..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/platform/node/globalThis.js +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins -export const _globalThis = typeof globalThis === 'object' ? globalThis : global; -//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js b/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js deleted file mode 100644 index efcad2e9c..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/platform/node/index.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export * from './globalThis'; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation-api.js b/node_modules/@opentelemetry/api/build/esnext/propagation-api.js deleted file mode 100644 index 7964ed9e0..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/propagation-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -import { PropagationAPI } from './api/propagation'; -/** Entrypoint for propagation API */ -export const propagation = PropagationAPI.getInstance(); -//# sourceMappingURL=propagation-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js deleted file mode 100644 index 642b84eda..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/propagation/NoopTextMapPropagator.js +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * No-op implementations of {@link TextMapPropagator}. - */ -export class NoopTextMapPropagator { - /** Noop inject function does nothing */ - inject(_context, _carrier) { } - /** Noop extract function does nothing and returns the input context */ - extract(context, _carrier) { - return context; - } - fields() { - return []; - } -} -//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js deleted file mode 100644 index 4c369958b..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/propagation/TextMapPropagator.js +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export const defaultTextMapGetter = { - get(carrier, key) { - if (carrier == null) { - return undefined; - } - return carrier[key]; - }, - keys(carrier) { - if (carrier == null) { - return []; - } - return Object.keys(carrier); - }, -}; -export const defaultTextMapSetter = { - set(carrier, key, value) { - if (carrier == null) { - return; - } - carrier[key] = value; - }, -}; -//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace-api.js b/node_modules/@opentelemetry/api/build/esnext/trace-api.js deleted file mode 100644 index 3a0b9b0fb..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace-api.js +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-import { TraceAPI } from './api/trace'; -/** Entrypoint for trace API */ -export const trace = TraceAPI.getInstance(); -//# sourceMappingURL=trace-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js deleted file mode 100644 index b008a6b8f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/NonRecordingSpan.js +++ /dev/null @@ -1,59 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { INVALID_SPAN_CONTEXT } from './invalid-span-constants'; -/** - * The NonRecordingSpan is the default {@link Span} that is used when no Span - * implementation is available. All operations are no-op including context - * propagation. - */ -export class NonRecordingSpan { - constructor(_spanContext = INVALID_SPAN_CONTEXT) { - this._spanContext = _spanContext; - } - // Returns a SpanContext. 
- spanContext() { - return this._spanContext; - } - // By default does nothing - setAttribute(_key, _value) { - return this; - } - // By default does nothing - setAttributes(_attributes) { - return this; - } - // By default does nothing - addEvent(_name, _attributes) { - return this; - } - // By default does nothing - setStatus(_status) { - return this; - } - // By default does nothing - updateName(_name) { - return this; - } - // By default does nothing - end(_endTime) { } - // isRecording always returns false for NonRecordingSpan. - isRecording() { - return false; - } - // By default does nothing - recordException(_exception, _time) { } -} -//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js deleted file mode 100644 index ddc7760b6..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracer.js +++ /dev/null @@ -1,71 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { ContextAPI } from '../api/context'; -import { getSpanContext, setSpan } from '../trace/context-utils'; -import { NonRecordingSpan } from './NonRecordingSpan'; -import { isSpanContextValid } from './spancontext-utils'; -const contextApi = ContextAPI.getInstance(); -/** - * No-op implementations of {@link Tracer}. 
- */ -export class NoopTracer { - // startSpan starts a noop span. - startSpan(name, options, context = contextApi.active()) { - const root = Boolean(options === null || options === void 0 ? void 0 : options.root); - if (root) { - return new NonRecordingSpan(); - } - const parentFromContext = context && getSpanContext(context); - if (isSpanContext(parentFromContext) && - isSpanContextValid(parentFromContext)) { - return new NonRecordingSpan(parentFromContext); - } - else { - return new NonRecordingSpan(); - } - } - startActiveSpan(name, arg2, arg3, arg4) { - let opts; - let ctx; - let fn; - if (arguments.length < 2) { - return; - } - else if (arguments.length === 2) { - fn = arg2; - } - else if (arguments.length === 3) { - opts = arg2; - fn = arg3; - } - else { - opts = arg2; - ctx = arg3; - fn = arg4; - } - const parentContext = ctx !== null && ctx !== void 0 ? ctx : contextApi.active(); - const span = this.startSpan(name, opts, parentContext); - const contextWithSpanSet = setSpan(parentContext, span); - return contextApi.with(contextWithSpanSet, fn, undefined, span); - } -} -function isSpanContext(spanContext) { - return (typeof spanContext === 'object' && - typeof spanContext['spanId'] === 'string' && - typeof spanContext['traceId'] === 'string' && - typeof spanContext['traceFlags'] === 'number'); -} -//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js deleted file mode 100644 index b542b7dab..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/NoopTracerProvider.js +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { NoopTracer } from './NoopTracer'; -/** - * An implementation of the {@link TracerProvider} which returns an impotent - * Tracer for all calls to `getTracer`. - * - * All operations are no-op. - */ -export class NoopTracerProvider { - getTracer(_name, _version, _options) { - return new NoopTracer(); - } -} -//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js deleted file mode 100644 index 94dcb020f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracer.js +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { NoopTracer } from './NoopTracer'; -const NOOP_TRACER = new NoopTracer(); -/** - * Proxy tracer provided by the proxy tracer provider - */ -export class ProxyTracer { - constructor(_provider, name, version, options) { - this._provider = _provider; - this.name = name; - this.version = version; - this.options = options; - } - startSpan(name, options, context) { - return this._getTracer().startSpan(name, options, context); - } - startActiveSpan(_name, _options, _context, _fn) { - const tracer = this._getTracer(); - return Reflect.apply(tracer.startActiveSpan, tracer, arguments); - } - /** - * Try to get a tracer from the proxy tracer provider. - * If the proxy tracer provider has no delegate, return a noop tracer. - */ - _getTracer() { - if (this._delegate) { - return this._delegate; - } - const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); - if (!tracer) { - return NOOP_TRACER; - } - this._delegate = tracer; - return this._delegate; - } -} -//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js deleted file mode 100644 index b62fb8161..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/ProxyTracerProvider.js +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { ProxyTracer } from './ProxyTracer'; -import { NoopTracerProvider } from './NoopTracerProvider'; -const NOOP_TRACER_PROVIDER = new NoopTracerProvider(); -/** - * Tracer provider which provides {@link ProxyTracer}s. - * - * Before a delegate is set, tracers provided are NoOp. - * When a delegate is set, traces are provided from the delegate. - * When a delegate is set after tracers have already been provided, - * all tracers already provided will use the provided delegate implementation. - */ -export class ProxyTracerProvider { - /** - * Get a {@link ProxyTracer} - */ - getTracer(name, version, options) { - var _a; - return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer(this, name, version, options)); - } - getDelegate() { - var _a; - return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; - } - /** - * Set the delegate tracer provider - */ - setDelegate(delegate) { - this._delegate = delegate; - } - getDelegateTracer(name, version, options) { - var _a; - return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); - } -} -//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js b/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js deleted file mode 100644 index 22a60a127..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/Sampler.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js deleted file mode 100644 index be657416b..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/SamplingResult.js +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -/** - * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. - * A sampling decision that determines how a {@link Span} will be recorded - * and collected. - */ -export var SamplingDecision; -(function (SamplingDecision) { - /** - * `Span.isRecording() === false`, span will not be recorded and all events - * and attributes will be dropped. - */ - SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; - /** - * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} - * MUST NOT be set. 
- */ - SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; - /** - * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} - * MUST be set. - */ - SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; -})(SamplingDecision || (SamplingDecision = {})); -//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js deleted file mode 100644 index 06b42b151..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/SpanOptions.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js b/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js deleted file mode 100644 index 6f1b9a3f8..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/attributes.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js b/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js deleted file mode 100644 index 5d113f136..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/context-utils.js +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { createContextKey } from '../context/context'; -import { NonRecordingSpan } from './NonRecordingSpan'; -import { ContextAPI } from '../api/context'; -/** - * span key - */ -const SPAN_KEY = createContextKey('OpenTelemetry Context Key SPAN'); -/** - * Return the span if one exists - * - * @param context context to get span from - */ -export function getSpan(context) { - return context.getValue(SPAN_KEY) || undefined; -} -/** - * Gets the span from the current context, if one exists. 
- */ -export function getActiveSpan() { - return getSpan(ContextAPI.getInstance().active()); -} -/** - * Set the span on a context - * - * @param context context to use as parent - * @param span span to set active - */ -export function setSpan(context, span) { - return context.setValue(SPAN_KEY, span); -} -/** - * Remove current span stored in the context - * - * @param context context to delete span from - */ -export function deleteSpan(context) { - return context.deleteValue(SPAN_KEY); -} -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context context to set active span on - * @param spanContext span context to be wrapped - */ -export function setSpanContext(context, spanContext) { - return setSpan(context, new NonRecordingSpan(spanContext)); -} -/** - * Get the span context of the span if it exists. - * - * @param context context to get values from - */ -export function getSpanContext(context) { - var _a; - return (_a = getSpan(context)) === null || _a === void 0 ? void 0 : _a.spanContext(); -} -//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js deleted file mode 100644 index 3be2ea3ce..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-impl.js +++ /dev/null @@ -1,99 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { validateKey, validateValue } from './tracestate-validators'; -const MAX_TRACE_STATE_ITEMS = 32; -const MAX_TRACE_STATE_LEN = 512; -const LIST_MEMBERS_SEPARATOR = ','; -const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; -/** - * TraceState must be a class and not a simple object type because of the spec - * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). - * - * Here is the list of allowed mutations: - * - New key-value pair should be added into the beginning of the list - * - The value of any key can be updated. Modified keys MUST be moved to the - * beginning of the list. - */ -export class TraceStateImpl { - constructor(rawTraceState) { - this._internalState = new Map(); - if (rawTraceState) - this._parse(rawTraceState); - } - set(key, value) { - // TODO: Benchmark the different approaches(map vs list) and - // use the faster one. - const traceState = this._clone(); - if (traceState._internalState.has(key)) { - traceState._internalState.delete(key); - } - traceState._internalState.set(key, value); - return traceState; - } - unset(key) { - const traceState = this._clone(); - traceState._internalState.delete(key); - return traceState; - } - get(key) { - return this._internalState.get(key); - } - serialize() { - return this._keys() - .reduce((agg, key) => { - agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); - return agg; - }, []) - .join(LIST_MEMBERS_SEPARATOR); - } - _parse(rawTraceState) { - if (rawTraceState.length > MAX_TRACE_STATE_LEN) - return; - this._internalState = rawTraceState - .split(LIST_MEMBERS_SEPARATOR) - .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning - .reduce((agg, part) => { - const listMember = part.trim(); // Optional Whitespace (OWS) handling - const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); - if (i !== -1) { - const key = listMember.slice(0, 
i); - const value = listMember.slice(i + 1, part.length); - if (validateKey(key) && validateValue(value)) { - agg.set(key, value); - } - else { - // TODO: Consider to add warning log - } - } - return agg; - }, new Map()); - // Because of the reverse() requirement, trunc must be done after map is created - if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { - this._internalState = new Map(Array.from(this._internalState.entries()) - .reverse() // Use reverse same as original tracestate parse chain - .slice(0, MAX_TRACE_STATE_ITEMS)); - } - } - _keys() { - return Array.from(this._internalState.keys()).reverse(); - } - _clone() { - const traceState = new TraceStateImpl(); - traceState._internalState = new Map(this._internalState); - return traceState; - } -} -//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js deleted file mode 100644 index 3a4f95fba..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/internal/tracestate-validators.js +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; -const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; -const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; -const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); -const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; -const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; -/** - * Key is opaque string up to 256 characters printable. It MUST begin with a - * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, - * underscores _, dashes -, asterisks *, and forward slashes /. - * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the - * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. - * see https://www.w3.org/TR/trace-context/#key - */ -export function validateKey(key) { - return VALID_KEY_REGEX.test(key); -} -/** - * Value is opaque string up to 256 characters printable ASCII RFC0020 - * characters (i.e., the range 0x20 to 0x7E) except comma , and =. - */ -export function validateValue(value) { - return (VALID_VALUE_BASE_REGEX.test(value) && - !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); -} -//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js deleted file mode 100644 index feea46914..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/internal/utils.js +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { TraceStateImpl } from './tracestate-impl'; -export function createTraceState(rawTraceState) { - return new TraceStateImpl(rawTraceState); -} -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js deleted file mode 100644 index bd912f4dc..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/invalid-span-constants.js +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -import { TraceFlags } from './trace_flags'; -export const INVALID_SPANID = '0000000000000000'; -export const INVALID_TRACEID = '00000000000000000000000000000000'; -export const INVALID_SPAN_CONTEXT = { - traceId: INVALID_TRACEID, - spanId: INVALID_SPANID, - traceFlags: TraceFlags.NONE, -}; -//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/link.js b/node_modules/@opentelemetry/api/build/esnext/trace/link.js deleted file mode 100644 index 7c8accbe1..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/link.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span.js b/node_modules/@opentelemetry/api/build/esnext/trace/span.js deleted file mode 100644 index f41c7f6f5..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/span.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js b/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js deleted file mode 100644 index 1bb88b0d7..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/span_context.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js b/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js deleted file mode 100644 index 1119df92f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/span_kind.js +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export var SpanKind; -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. 
- */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(SpanKind || (SpanKind = {})); -//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js deleted file mode 100644 index a51187ae6..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/spancontext-utils.js +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -import { INVALID_SPANID, INVALID_TRACEID } from './invalid-span-constants'; -import { NonRecordingSpan } from './NonRecordingSpan'; -const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; -const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; -export function isValidTraceId(traceId) { - return VALID_TRACEID_REGEX.test(traceId) && traceId !== INVALID_TRACEID; -} -export function isValidSpanId(spanId) { - return VALID_SPANID_REGEX.test(spanId) && spanId !== INVALID_SPANID; -} -/** - * Returns true if this {@link SpanContext} is valid. - * @return true if this {@link SpanContext} is valid. 
- */ -export function isSpanContextValid(spanContext) { - return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); -} -/** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context - */ -export function wrapSpanContext(spanContext) { - return new NonRecordingSpan(spanContext); -} -//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/status.js b/node_modules/@opentelemetry/api/build/esnext/trace/status.js deleted file mode 100644 index 5ee55e42f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/status.js +++ /dev/null @@ -1,20 +0,0 @@ -/** - * An enumeration of status codes. - */ -export var SpanStatusCode; -(function (SpanStatusCode) { - /** - * The default status. - */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. - */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(SpanStatusCode || (SpanStatusCode = {})); -//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js deleted file mode 100644 index 8a7b00072..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/trace_flags.js +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export var TraceFlags; -(function (TraceFlags) { - /** Represents no flag set. */ - TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; - /** Bit to represent whether trace is sampled in trace flags. */ - TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; -})(TraceFlags || (TraceFlags = {})); -//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js b/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js deleted file mode 100644 index a6c368f88..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/trace_state.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js b/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js deleted file mode 100644 index ad066dc3f..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/tracer.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js deleted file mode 100644 index 470a3a732..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_options.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -export {}; -//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js deleted file mode 100644 index adf432a65..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/trace/tracer_provider.js +++ /dev/null @@ -1,17 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -export {}; -//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/esnext/version.js b/node_modules/@opentelemetry/api/build/esnext/version.js deleted file mode 100644 index 77a98c416..000000000 --- a/node_modules/@opentelemetry/api/build/esnext/version.js +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -// this is autogenerated file, see scripts/version-update.js -export const VERSION = '1.8.0'; -//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/context.js b/node_modules/@opentelemetry/api/build/src/api/context.js deleted file mode 100644 index 8af551f82..000000000 --- a/node_modules/@opentelemetry/api/build/src/api/context.js +++ /dev/null @@ -1,81 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ContextAPI = void 0; -const NoopContextManager_1 = require("../context/NoopContextManager"); -const global_utils_1 = require("../internal/global-utils"); -const diag_1 = require("./diag"); -const API_NAME = 'context'; -const NOOP_CONTEXT_MANAGER = new NoopContextManager_1.NoopContextManager(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Context API - */ -class ContextAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Context API */ - static getInstance() { - if (!this._instance) { - this._instance = new ContextAPI(); - } - return this._instance; - } - /** - * Set the current context manager. 
- * - * @returns true if the context manager was successfully registered, else false - */ - setGlobalContextManager(contextManager) { - return (0, global_utils_1.registerGlobal)(API_NAME, contextManager, diag_1.DiagAPI.instance()); - } - /** - * Get the currently active context - */ - active() { - return this._getContextManager().active(); - } - /** - * Execute a function with an active context - * - * @param context context to be active during function execution - * @param fn function to execute in a context - * @param thisArg optional receiver to be used for calling fn - * @param args optional arguments forwarded to fn - */ - with(context, fn, thisArg, ...args) { - return this._getContextManager().with(context, fn, thisArg, ...args); - } - /** - * Bind a context to a target function or event emitter - * - * @param context context to bind to the event emitter or function. Defaults to the currently active context - * @param target function or event emitter to bind - */ - bind(context, target) { - return this._getContextManager().bind(context, target); - } - _getContextManager() { - return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_CONTEXT_MANAGER; - } - /** Disable and remove the global context manager */ - disable() { - this._getContextManager().disable(); - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - } -} -exports.ContextAPI = ContextAPI; -//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/diag.js b/node_modules/@opentelemetry/api/build/src/api/diag.js deleted file mode 100644 index 945692323..000000000 --- a/node_modules/@opentelemetry/api/build/src/api/diag.js +++ /dev/null @@ -1,93 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DiagAPI = void 0; -const ComponentLogger_1 = require("../diag/ComponentLogger"); -const logLevelLogger_1 = require("../diag/internal/logLevelLogger"); -const types_1 = require("../diag/types"); -const global_utils_1 = require("../internal/global-utils"); -const API_NAME = 'diag'; -/** - * Singleton object which represents the entry point to the OpenTelemetry internal - * diagnostic API - */ -class DiagAPI { - /** - * Private internal constructor - * @private - */ - constructor() { - function _logProxy(funcName) { - return function (...args) { - const logger = (0, global_utils_1.getGlobal)('diag'); - // shortcut if logger not set - if (!logger) - return; - return logger[funcName](...args); - }; - } - // Using self local variable for minification purposes as 'this' cannot be minified - const self = this; - // DiagAPI specific functions - const setLogger = (logger, optionsOrLogLevel = { logLevel: types_1.DiagLogLevel.INFO }) => { - var _a, _b, _c; - if (logger === self) { - // There isn't much we can do here. - // Logging to the console might break the user application. - // Try to log to self. If a logger was previously registered it will receive the log. - const err = new Error('Cannot use diag as the logger for itself. Please use a DiagLogger implementation like ConsoleDiagLogger or a custom implementation'); - self.error((_a = err.stack) !== null && _a !== void 0 ? 
_a : err.message); - return false; - } - if (typeof optionsOrLogLevel === 'number') { - optionsOrLogLevel = { - logLevel: optionsOrLogLevel, - }; - } - const oldLogger = (0, global_utils_1.getGlobal)('diag'); - const newLogger = (0, logLevelLogger_1.createLogLevelDiagLogger)((_b = optionsOrLogLevel.logLevel) !== null && _b !== void 0 ? _b : types_1.DiagLogLevel.INFO, logger); - // There already is an logger registered. We'll let it know before overwriting it. - if (oldLogger && !optionsOrLogLevel.suppressOverrideMessage) { - const stack = (_c = new Error().stack) !== null && _c !== void 0 ? _c : ''; - oldLogger.warn(`Current logger will be overwritten from ${stack}`); - newLogger.warn(`Current logger will overwrite one already registered from ${stack}`); - } - return (0, global_utils_1.registerGlobal)('diag', newLogger, self, true); - }; - self.setLogger = setLogger; - self.disable = () => { - (0, global_utils_1.unregisterGlobal)(API_NAME, self); - }; - self.createComponentLogger = (options) => { - return new ComponentLogger_1.DiagComponentLogger(options); - }; - self.verbose = _logProxy('verbose'); - self.debug = _logProxy('debug'); - self.info = _logProxy('info'); - self.warn = _logProxy('warn'); - self.error = _logProxy('error'); - } - /** Get the singleton instance of the DiagAPI API */ - static instance() { - if (!this._instance) { - this._instance = new DiagAPI(); - } - return this._instance; - } -} -exports.DiagAPI = DiagAPI; -//# sourceMappingURL=diag.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/metrics.js b/node_modules/@opentelemetry/api/build/src/api/metrics.js deleted file mode 100644 index 4bbc43318..000000000 --- a/node_modules/@opentelemetry/api/build/src/api/metrics.js +++ /dev/null @@ -1,61 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.MetricsAPI = void 0; -const NoopMeterProvider_1 = require("../metrics/NoopMeterProvider"); -const global_utils_1 = require("../internal/global-utils"); -const diag_1 = require("./diag"); -const API_NAME = 'metrics'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Metrics API - */ -class MetricsAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { } - /** Get the singleton instance of the Metrics API */ - static getInstance() { - if (!this._instance) { - this._instance = new MetricsAPI(); - } - return this._instance; - } - /** - * Set the current global meter provider. - * Returns true if the meter provider was successfully registered, else false. - */ - setGlobalMeterProvider(provider) { - return (0, global_utils_1.registerGlobal)(API_NAME, provider, diag_1.DiagAPI.instance()); - } - /** - * Returns the global meter provider. - */ - getMeterProvider() { - return (0, global_utils_1.getGlobal)(API_NAME) || NoopMeterProvider_1.NOOP_METER_PROVIDER; - } - /** - * Returns a meter from the global meter provider. 
- */ - getMeter(name, version, options) { - return this.getMeterProvider().getMeter(name, version, options); - } - /** Remove the global meter provider */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - } -} -exports.MetricsAPI = MetricsAPI; -//# sourceMappingURL=metrics.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/propagation.js b/node_modules/@opentelemetry/api/build/src/api/propagation.js deleted file mode 100644 index 7f03df81f..000000000 --- a/node_modules/@opentelemetry/api/build/src/api/propagation.js +++ /dev/null @@ -1,89 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.PropagationAPI = void 0; -const global_utils_1 = require("../internal/global-utils"); -const NoopTextMapPropagator_1 = require("../propagation/NoopTextMapPropagator"); -const TextMapPropagator_1 = require("../propagation/TextMapPropagator"); -const context_helpers_1 = require("../baggage/context-helpers"); -const utils_1 = require("../baggage/utils"); -const diag_1 = require("./diag"); -const API_NAME = 'propagation'; -const NOOP_TEXT_MAP_PROPAGATOR = new NoopTextMapPropagator_1.NoopTextMapPropagator(); -/** - * Singleton object which represents the entry point to the OpenTelemetry Propagation API - */ -class PropagationAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this.createBaggage = utils_1.createBaggage; - this.getBaggage = context_helpers_1.getBaggage; - this.getActiveBaggage = context_helpers_1.getActiveBaggage; - this.setBaggage = context_helpers_1.setBaggage; - this.deleteBaggage = context_helpers_1.deleteBaggage; - } - /** Get the singleton instance of the Propagator API */ - static getInstance() { - if (!this._instance) { - this._instance = new PropagationAPI(); - } - return this._instance; - } - /** - * Set the current propagator. 
- * - * @returns true if the propagator was successfully registered, else false - */ - setGlobalPropagator(propagator) { - return (0, global_utils_1.registerGlobal)(API_NAME, propagator, diag_1.DiagAPI.instance()); - } - /** - * Inject context into a carrier to be propagated inter-process - * - * @param context Context carrying tracing data to inject - * @param carrier carrier to inject context into - * @param setter Function used to set values on the carrier - */ - inject(context, carrier, setter = TextMapPropagator_1.defaultTextMapSetter) { - return this._getGlobalPropagator().inject(context, carrier, setter); - } - /** - * Extract context from a carrier - * - * @param context Context which the newly created context will inherit from - * @param carrier Carrier to extract context from - * @param getter Function used to extract keys from a carrier - */ - extract(context, carrier, getter = TextMapPropagator_1.defaultTextMapGetter) { - return this._getGlobalPropagator().extract(context, carrier, getter); - } - /** - * Return a list of all fields which may be used by the propagator. - */ - fields() { - return this._getGlobalPropagator().fields(); - } - /** Remove the global propagator */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - } - _getGlobalPropagator() { - return (0, global_utils_1.getGlobal)(API_NAME) || NOOP_TEXT_MAP_PROPAGATOR; - } -} -exports.PropagationAPI = PropagationAPI; -//# sourceMappingURL=propagation.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/api/trace.js b/node_modules/@opentelemetry/api/build/src/api/trace.js deleted file mode 100644 index aa7a9da56..000000000 --- a/node_modules/@opentelemetry/api/build/src/api/trace.js +++ /dev/null @@ -1,79 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TraceAPI = void 0; -const global_utils_1 = require("../internal/global-utils"); -const ProxyTracerProvider_1 = require("../trace/ProxyTracerProvider"); -const spancontext_utils_1 = require("../trace/spancontext-utils"); -const context_utils_1 = require("../trace/context-utils"); -const diag_1 = require("./diag"); -const API_NAME = 'trace'; -/** - * Singleton object which represents the entry point to the OpenTelemetry Tracing API - */ -class TraceAPI { - /** Empty private constructor prevents end users from constructing a new instance of the API */ - constructor() { - this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - this.wrapSpanContext = spancontext_utils_1.wrapSpanContext; - this.isSpanContextValid = spancontext_utils_1.isSpanContextValid; - this.deleteSpan = context_utils_1.deleteSpan; - this.getSpan = context_utils_1.getSpan; - this.getActiveSpan = context_utils_1.getActiveSpan; - this.getSpanContext = context_utils_1.getSpanContext; - this.setSpan = context_utils_1.setSpan; - this.setSpanContext = context_utils_1.setSpanContext; - } - /** Get the singleton instance of the Trace API */ - static getInstance() { - if (!this._instance) { - this._instance = new TraceAPI(); - } - return this._instance; - } - /** - * Set the current global tracer. 
- * - * @returns true if the tracer provider was successfully registered, else false - */ - setGlobalTracerProvider(provider) { - const success = (0, global_utils_1.registerGlobal)(API_NAME, this._proxyTracerProvider, diag_1.DiagAPI.instance()); - if (success) { - this._proxyTracerProvider.setDelegate(provider); - } - return success; - } - /** - * Returns the global tracer provider. - */ - getTracerProvider() { - return (0, global_utils_1.getGlobal)(API_NAME) || this._proxyTracerProvider; - } - /** - * Returns a tracer from the global tracer provider. - */ - getTracer(name, version) { - return this.getTracerProvider().getTracer(name, version); - } - /** Remove the global tracer provider */ - disable() { - (0, global_utils_1.unregisterGlobal)(API_NAME, diag_1.DiagAPI.instance()); - this._proxyTracerProvider = new ProxyTracerProvider_1.ProxyTracerProvider(); - } -} -exports.TraceAPI = TraceAPI; -//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js b/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js deleted file mode 100644 index cc0f00bf1..000000000 --- a/node_modules/@opentelemetry/api/build/src/baggage/context-helpers.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.deleteBaggage = exports.setBaggage = exports.getActiveBaggage = exports.getBaggage = void 0; -const context_1 = require("../api/context"); -const context_2 = require("../context/context"); -/** - * Baggage key - */ -const BAGGAGE_KEY = (0, context_2.createContextKey)('OpenTelemetry Baggage Key'); -/** - * Retrieve the current baggage from the given context - * - * @param {Context} Context that manage all context values - * @returns {Baggage} Extracted baggage from the context - */ -function getBaggage(context) { - return context.getValue(BAGGAGE_KEY) || undefined; -} -exports.getBaggage = getBaggage; -/** - * Retrieve the current baggage from the active/current context - * - * @returns {Baggage} Extracted baggage from the context - */ -function getActiveBaggage() { - return getBaggage(context_1.ContextAPI.getInstance().active()); -} -exports.getActiveBaggage = getActiveBaggage; -/** - * Store a baggage in the given context - * - * @param {Context} Context that manage all context values - * @param {Baggage} baggage that will be set in the actual context - */ -function setBaggage(context, baggage) { - return context.setValue(BAGGAGE_KEY, baggage); -} -exports.setBaggage = setBaggage; -/** - * Delete the baggage stored in the given context - * - * @param {Context} Context that manage all context values - */ -function deleteBaggage(context) { - return context.deleteValue(BAGGAGE_KEY); -} -exports.deleteBaggage = deleteBaggage; -//# sourceMappingURL=context-helpers.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js deleted file mode 100644 index 6f04d4a67..000000000 --- a/node_modules/@opentelemetry/api/build/src/baggage/internal/baggage-impl.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * 
Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.BaggageImpl = void 0; -class BaggageImpl { - constructor(entries) { - this._entries = entries ? new Map(entries) : new Map(); - } - getEntry(key) { - const entry = this._entries.get(key); - if (!entry) { - return undefined; - } - return Object.assign({}, entry); - } - getAllEntries() { - return Array.from(this._entries.entries()).map(([k, v]) => [k, v]); - } - setEntry(key, entry) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.set(key, entry); - return newBaggage; - } - removeEntry(key) { - const newBaggage = new BaggageImpl(this._entries); - newBaggage._entries.delete(key); - return newBaggage; - } - removeEntries(...keys) { - const newBaggage = new BaggageImpl(this._entries); - for (const key of keys) { - newBaggage._entries.delete(key); - } - return newBaggage; - } - clear() { - return new BaggageImpl(); - } -} -exports.BaggageImpl = BaggageImpl; -//# sourceMappingURL=baggage-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js b/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js deleted file mode 100644 index 324c216dd..000000000 --- a/node_modules/@opentelemetry/api/build/src/baggage/internal/symbol.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the 
Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.baggageEntryMetadataSymbol = void 0; -/** - * Symbol used to make BaggageEntryMetadata an opaque type - */ -exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); -//# sourceMappingURL=symbol.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/types.js b/node_modules/@opentelemetry/api/build/src/baggage/types.js deleted file mode 100644 index c428c6d09..000000000 --- a/node_modules/@opentelemetry/api/build/src/baggage/types.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/baggage/utils.js b/node_modules/@opentelemetry/api/build/src/baggage/utils.js deleted file mode 100644 index a0bfbf6c3..000000000 --- a/node_modules/@opentelemetry/api/build/src/baggage/utils.js +++ /dev/null @@ -1,51 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.baggageEntryMetadataFromString = exports.createBaggage = void 0; -const diag_1 = require("../api/diag"); -const baggage_impl_1 = require("./internal/baggage-impl"); -const symbol_1 = require("./internal/symbol"); -const diag = diag_1.DiagAPI.instance(); -/** - * Create a new Baggage with optional entries - * - * @param entries An array of baggage entries the new baggage should contain - */ -function createBaggage(entries = {}) { - return new baggage_impl_1.BaggageImpl(new Map(Object.entries(entries))); -} -exports.createBaggage = createBaggage; -/** - * Create a serializable BaggageEntryMetadata object from a string. - * - * @param str string metadata. Format is currently not defined by the spec and has no special meaning. 
- * - */ -function baggageEntryMetadataFromString(str) { - if (typeof str !== 'string') { - diag.error(`Cannot create baggage metadata from unknown type: ${typeof str}`); - str = ''; - } - return { - __TYPE__: symbol_1.baggageEntryMetadataSymbol, - toString() { - return str; - }, - }; -} -exports.baggageEntryMetadataFromString = baggageEntryMetadataFromString; -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Attributes.js b/node_modules/@opentelemetry/api/build/src/common/Attributes.js deleted file mode 100644 index 684c93db9..000000000 --- a/node_modules/@opentelemetry/api/build/src/common/Attributes.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Exception.js b/node_modules/@opentelemetry/api/build/src/common/Exception.js deleted file mode 100644 index ed450aef6..000000000 --- a/node_modules/@opentelemetry/api/build/src/common/Exception.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Exception.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/common/Time.js b/node_modules/@opentelemetry/api/build/src/common/Time.js deleted file mode 100644 index 1faaf6983..000000000 --- a/node_modules/@opentelemetry/api/build/src/common/Time.js +++ /dev/null @@ -1,3 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Time.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context-api.js b/node_modules/@opentelemetry/api/build/src/context-api.js deleted file mode 100644 index b9aeea932..000000000 --- a/node_modules/@opentelemetry/api/build/src/context-api.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.context = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const context_1 = require("./api/context"); -/** Entrypoint for context API */ -exports.context = context_1.ContextAPI.getInstance(); -//# sourceMappingURL=context-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js b/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js deleted file mode 100644 index 10c6ae1bd..000000000 --- a/node_modules/@opentelemetry/api/build/src/context/NoopContextManager.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopContextManager = void 0; -const context_1 = require("./context"); -class NoopContextManager { - active() { - return context_1.ROOT_CONTEXT; - } - with(_context, fn, thisArg, ...args) { - return fn.call(thisArg, ...args); - } - bind(_context, target) { - return target; - } - enable() { - return this; - } - disable() { - return this; - } -} -exports.NoopContextManager = NoopContextManager; -//# sourceMappingURL=NoopContextManager.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/context.js b/node_modules/@opentelemetry/api/build/src/context/context.js deleted file mode 100644 index eecc15931..000000000 --- a/node_modules/@opentelemetry/api/build/src/context/context.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ROOT_CONTEXT = exports.createContextKey = void 0; -/** Get a key to uniquely identify a context value */ -function createContextKey(description) { - // The specification states that for the same input, multiple calls should - // return different keys. Due to the nature of the JS dependency management - // system, this creates problems where multiple versions of some package - // could hold different keys for the same property. 
- // - // Therefore, we use Symbol.for which returns the same key for the same input. - return Symbol.for(description); -} -exports.createContextKey = createContextKey; -class BaseContext { - /** - * Construct a new context which inherits values from an optional parent context. - * - * @param parentContext a context from which to inherit values - */ - constructor(parentContext) { - // for minification - const self = this; - self._currentContext = parentContext ? new Map(parentContext) : new Map(); - self.getValue = (key) => self._currentContext.get(key); - self.setValue = (key, value) => { - const context = new BaseContext(self._currentContext); - context._currentContext.set(key, value); - return context; - }; - self.deleteValue = (key) => { - const context = new BaseContext(self._currentContext); - context._currentContext.delete(key); - return context; - }; - } -} -/** The root context is used as the default parent context when there is no active context */ -exports.ROOT_CONTEXT = new BaseContext(); -//# sourceMappingURL=context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/context/types.js b/node_modules/@opentelemetry/api/build/src/context/types.js deleted file mode 100644 index c428c6d09..000000000 --- a/node_modules/@opentelemetry/api/build/src/context/types.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag-api.js b/node_modules/@opentelemetry/api/build/src/diag-api.js deleted file mode 100644 index cbf28db3c..000000000 --- a/node_modules/@opentelemetry/api/build/src/diag-api.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.diag = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const diag_1 = require("./api/diag"); -/** - * Entrypoint for Diag API. - * Defines Diagnostic handler used for internal diagnostic logging operations. - * The default provides a Noop DiagLogger implementation which may be changed via the - * diag.setLogger(logger: DiagLogger) function. 
- */ -exports.diag = diag_1.DiagAPI.instance(); -//# sourceMappingURL=diag-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js b/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js deleted file mode 100644 index 579b7e68f..000000000 --- a/node_modules/@opentelemetry/api/build/src/diag/ComponentLogger.js +++ /dev/null @@ -1,59 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DiagComponentLogger = void 0; -const global_utils_1 = require("../internal/global-utils"); -/** - * Component Logger which is meant to be used as part of any component which - * will add automatically additional namespace in front of the log message. 
- * It will then forward all message to global diag logger - * @example - * const cLogger = diag.createComponentLogger({ namespace: '@opentelemetry/instrumentation-http' }); - * cLogger.debug('test'); - * // @opentelemetry/instrumentation-http test - */ -class DiagComponentLogger { - constructor(props) { - this._namespace = props.namespace || 'DiagComponentLogger'; - } - debug(...args) { - return logProxy('debug', this._namespace, args); - } - error(...args) { - return logProxy('error', this._namespace, args); - } - info(...args) { - return logProxy('info', this._namespace, args); - } - warn(...args) { - return logProxy('warn', this._namespace, args); - } - verbose(...args) { - return logProxy('verbose', this._namespace, args); - } -} -exports.DiagComponentLogger = DiagComponentLogger; -function logProxy(funcName, namespace, args) { - const logger = (0, global_utils_1.getGlobal)('diag'); - // shortcut if logger not set - if (!logger) { - return; - } - args.unshift(namespace); - return logger[funcName](...args); -} -//# sourceMappingURL=ComponentLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js b/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js deleted file mode 100644 index 1962275ff..000000000 --- a/node_modules/@opentelemetry/api/build/src/diag/consoleLogger.js +++ /dev/null @@ -1,57 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DiagConsoleLogger = void 0; -const consoleMap = [ - { n: 'error', c: 'error' }, - { n: 'warn', c: 'warn' }, - { n: 'info', c: 'info' }, - { n: 'debug', c: 'debug' }, - { n: 'verbose', c: 'trace' }, -]; -/** - * A simple Immutable Console based diagnostic logger which will output any messages to the Console. - * If you want to limit the amount of logging to a specific level or lower use the - * {@link createLogLevelDiagLogger} - */ -class DiagConsoleLogger { - constructor() { - function _consoleFunc(funcName) { - return function (...args) { - if (console) { - // Some environments only expose the console when the F12 developer console is open - // eslint-disable-next-line no-console - let theFunc = console[funcName]; - if (typeof theFunc !== 'function') { - // Not all environments support all functions - // eslint-disable-next-line no-console - theFunc = console.log; - } - // One last final check - if (typeof theFunc === 'function') { - return theFunc.apply(console, args); - } - } - }; - } - for (let i = 0; i < consoleMap.length; i++) { - this[consoleMap[i].n] = _consoleFunc(consoleMap[i].c); - } - } -} -exports.DiagConsoleLogger = DiagConsoleLogger; -//# sourceMappingURL=consoleLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js deleted file mode 100644 index ee1702e5f..000000000 --- a/node_modules/@opentelemetry/api/build/src/diag/internal/logLevelLogger.js +++ /dev/null @@ -1,45 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createLogLevelDiagLogger = void 0; -const types_1 = require("../types"); -function createLogLevelDiagLogger(maxLevel, logger) { - if (maxLevel < types_1.DiagLogLevel.NONE) { - maxLevel = types_1.DiagLogLevel.NONE; - } - else if (maxLevel > types_1.DiagLogLevel.ALL) { - maxLevel = types_1.DiagLogLevel.ALL; - } - // In case the logger is null or undefined - logger = logger || {}; - function _filterFunc(funcName, theLevel) { - const theFunc = logger[funcName]; - if (typeof theFunc === 'function' && maxLevel >= theLevel) { - return theFunc.bind(logger); - } - return function () { }; - } - return { - error: _filterFunc('error', types_1.DiagLogLevel.ERROR), - warn: _filterFunc('warn', types_1.DiagLogLevel.WARN), - info: _filterFunc('info', types_1.DiagLogLevel.INFO), - debug: _filterFunc('debug', types_1.DiagLogLevel.DEBUG), - verbose: _filterFunc('verbose', types_1.DiagLogLevel.VERBOSE), - }; -} -exports.createLogLevelDiagLogger = createLogLevelDiagLogger; -//# sourceMappingURL=logLevelLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js b/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js deleted file mode 100644 index 409163145..000000000 --- a/node_modules/@opentelemetry/api/build/src/diag/internal/noopLogger.js +++ /dev/null @@ -1,35 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may 
not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createNoopDiagLogger = void 0; -function noopLogFunction() { } -/** - * Returns a No-Op Diagnostic logger where all messages do nothing. - * @implements {@link DiagLogger} - * @returns {DiagLogger} - */ -function createNoopDiagLogger() { - return { - verbose: noopLogFunction, - debug: noopLogFunction, - info: noopLogFunction, - warn: noopLogFunction, - error: noopLogFunction, - }; -} -exports.createNoopDiagLogger = createNoopDiagLogger; -//# sourceMappingURL=noopLogger.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/diag/types.js b/node_modules/@opentelemetry/api/build/src/diag/types.js deleted file mode 100644 index c195e45e8..000000000 --- a/node_modules/@opentelemetry/api/build/src/diag/types.js +++ /dev/null @@ -1,44 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.DiagLogLevel = void 0; -/** - * Defines the available internal logging levels for the diagnostic logger, the numeric values - * of the levels are defined to match the original values from the initial LogLevel to avoid - * compatibility/migration issues for any implementation that assume the numeric ordering. - */ -var DiagLogLevel; -(function (DiagLogLevel) { - /** Diagnostic Logging level setting to disable all logging (except and forced logs) */ - DiagLogLevel[DiagLogLevel["NONE"] = 0] = "NONE"; - /** Identifies an error scenario */ - DiagLogLevel[DiagLogLevel["ERROR"] = 30] = "ERROR"; - /** Identifies a warning scenario */ - DiagLogLevel[DiagLogLevel["WARN"] = 50] = "WARN"; - /** General informational log message */ - DiagLogLevel[DiagLogLevel["INFO"] = 60] = "INFO"; - /** General debug log message */ - DiagLogLevel[DiagLogLevel["DEBUG"] = 70] = "DEBUG"; - /** - * Detailed trace level logging should only be used for development, should only be set - * in a development environment. 
- */ - DiagLogLevel[DiagLogLevel["VERBOSE"] = 80] = "VERBOSE"; - /** Used to set the logging level to include all logging */ - DiagLogLevel[DiagLogLevel["ALL"] = 9999] = "ALL"; -})(DiagLogLevel = exports.DiagLogLevel || (exports.DiagLogLevel = {})); -//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/index.js b/node_modules/@opentelemetry/api/build/src/experimental/index.js deleted file mode 100644 index bd611ec0c..000000000 --- a/node_modules/@opentelemetry/api/build/src/experimental/index.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SugaredTracer = exports.wrapTracer = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -var SugaredTracer_1 = require("./trace/SugaredTracer"); -Object.defineProperty(exports, "wrapTracer", { enumerable: true, get: function () { return SugaredTracer_1.wrapTracer; } }); -Object.defineProperty(exports, "SugaredTracer", { enumerable: true, get: function () { return SugaredTracer_1.SugaredTracer; } }); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js deleted file mode 100644 index a18d65b22..000000000 --- a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredOptions.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=SugaredOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js b/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js deleted file mode 100644 index aae624982..000000000 --- a/node_modules/@opentelemetry/api/build/src/experimental/trace/SugaredTracer.js +++ /dev/null @@ -1,93 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SugaredTracer = exports.wrapTracer = void 0; -const __1 = require("../../"); -const defaultOnException = (e, span) => { - span.recordException(e); - span.setStatus({ - code: __1.SpanStatusCode.ERROR, - }); -}; -/** - * return a new SugaredTracer created from the supplied one - * @param tracer - */ -function wrapTracer(tracer) { - return new SugaredTracer(tracer); -} -exports.wrapTracer = wrapTracer; -class SugaredTracer { - constructor(tracer) { - this._tracer = tracer; - this.startSpan = tracer.startSpan.bind(this._tracer); - this.startActiveSpan = tracer.startActiveSpan.bind(this._tracer); - } - withActiveSpan(name, arg2, arg3, arg4) { - const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); - return this._tracer.startActiveSpan(name, opts, ctx, (span) => handleFn(span, opts, fn)); - } - withSpan(name, arg2, arg3, arg4) { - const { opts, ctx, fn } = massageParams(arg2, arg3, arg4); - const span = this._tracer.startSpan(name, opts, ctx); - return handleFn(span, opts, fn); - } -} -exports.SugaredTracer = SugaredTracer; -/** - * Massages parameters of withSpan and withActiveSpan to allow signature overwrites - * @param arg - * @param arg2 - * @param arg3 - */ -function massageParams(arg, arg2, arg3) { - let opts; - let ctx; - let fn; - if (!arg2 && !arg3) { - fn = arg; - } - else if (!arg3) { - opts = arg; - fn = arg2; - } - else { - opts = arg; - ctx = arg2; - fn = arg3; - } - opts = opts !== null && opts 
!== void 0 ? opts : {}; - ctx = ctx !== null && ctx !== void 0 ? ctx : __1.context.active(); - return { opts, ctx, fn }; -} -/** - * Executes fn, returns results and runs onException in the case of exception to allow overwriting of error handling - * @param span - * @param opts - * @param fn - */ -function handleFn(span, opts, fn) { - var _a; - const onException = (_a = opts.onException) !== null && _a !== void 0 ? _a : defaultOnException; - const errorHandler = (e) => { - onException(e, span); - span.end(); - throw e; - }; - try { - const ret = fn(span); - // if fn is an async function, attach a recordException and spanEnd callback to the promise - if (typeof (ret === null || ret === void 0 ? void 0 : ret.then) === 'function') { - return ret.then(val => { - span.end(); - return val; - }, errorHandler); - } - span.end(); - return ret; - } - catch (e) { - // add throw to signal the compiler that this will throw in the inner scope - throw errorHandler(e); - } -} -//# sourceMappingURL=SugaredTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/index.js b/node_modules/@opentelemetry/api/build/src/index.js deleted file mode 100644 index cb0a87232..000000000 --- a/node_modules/@opentelemetry/api/build/src/index.js +++ /dev/null @@ -1,81 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.trace = exports.propagation = exports.metrics = exports.diag = exports.context = exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = exports.isValidSpanId = exports.isValidTraceId = exports.isSpanContextValid = exports.createTraceState = exports.TraceFlags = exports.SpanStatusCode = exports.SpanKind = exports.SamplingDecision = exports.ProxyTracerProvider = exports.ProxyTracer = exports.defaultTextMapSetter = exports.defaultTextMapGetter = exports.ValueType = exports.createNoopMeter = exports.DiagLogLevel = exports.DiagConsoleLogger = exports.ROOT_CONTEXT = exports.createContextKey = exports.baggageEntryMetadataFromString = void 0; -var utils_1 = require("./baggage/utils"); -Object.defineProperty(exports, "baggageEntryMetadataFromString", { enumerable: true, get: function () { return utils_1.baggageEntryMetadataFromString; } }); -// Context APIs -var context_1 = require("./context/context"); -Object.defineProperty(exports, "createContextKey", { enumerable: true, get: function () { return context_1.createContextKey; } }); -Object.defineProperty(exports, "ROOT_CONTEXT", { enumerable: true, get: function () { return context_1.ROOT_CONTEXT; } }); -// Diag APIs -var consoleLogger_1 = require("./diag/consoleLogger"); -Object.defineProperty(exports, "DiagConsoleLogger", { enumerable: true, get: function () { return consoleLogger_1.DiagConsoleLogger; } }); -var types_1 = require("./diag/types"); -Object.defineProperty(exports, "DiagLogLevel", { enumerable: true, get: function () { return types_1.DiagLogLevel; } }); -// Metrics APIs -var NoopMeter_1 = require("./metrics/NoopMeter"); -Object.defineProperty(exports, "createNoopMeter", { enumerable: true, get: function () { return NoopMeter_1.createNoopMeter; } }); -var Metric_1 = require("./metrics/Metric"); -Object.defineProperty(exports, "ValueType", { enumerable: true, get: function () { return Metric_1.ValueType; } 
}); -// Propagation APIs -var TextMapPropagator_1 = require("./propagation/TextMapPropagator"); -Object.defineProperty(exports, "defaultTextMapGetter", { enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapGetter; } }); -Object.defineProperty(exports, "defaultTextMapSetter", { enumerable: true, get: function () { return TextMapPropagator_1.defaultTextMapSetter; } }); -var ProxyTracer_1 = require("./trace/ProxyTracer"); -Object.defineProperty(exports, "ProxyTracer", { enumerable: true, get: function () { return ProxyTracer_1.ProxyTracer; } }); -var ProxyTracerProvider_1 = require("./trace/ProxyTracerProvider"); -Object.defineProperty(exports, "ProxyTracerProvider", { enumerable: true, get: function () { return ProxyTracerProvider_1.ProxyTracerProvider; } }); -var SamplingResult_1 = require("./trace/SamplingResult"); -Object.defineProperty(exports, "SamplingDecision", { enumerable: true, get: function () { return SamplingResult_1.SamplingDecision; } }); -var span_kind_1 = require("./trace/span_kind"); -Object.defineProperty(exports, "SpanKind", { enumerable: true, get: function () { return span_kind_1.SpanKind; } }); -var status_1 = require("./trace/status"); -Object.defineProperty(exports, "SpanStatusCode", { enumerable: true, get: function () { return status_1.SpanStatusCode; } }); -var trace_flags_1 = require("./trace/trace_flags"); -Object.defineProperty(exports, "TraceFlags", { enumerable: true, get: function () { return trace_flags_1.TraceFlags; } }); -var utils_2 = require("./trace/internal/utils"); -Object.defineProperty(exports, "createTraceState", { enumerable: true, get: function () { return utils_2.createTraceState; } }); -var spancontext_utils_1 = require("./trace/spancontext-utils"); -Object.defineProperty(exports, "isSpanContextValid", { enumerable: true, get: function () { return spancontext_utils_1.isSpanContextValid; } }); -Object.defineProperty(exports, "isValidTraceId", { enumerable: true, get: function () { return 
spancontext_utils_1.isValidTraceId; } }); -Object.defineProperty(exports, "isValidSpanId", { enumerable: true, get: function () { return spancontext_utils_1.isValidSpanId; } }); -var invalid_span_constants_1 = require("./trace/invalid-span-constants"); -Object.defineProperty(exports, "INVALID_SPANID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPANID; } }); -Object.defineProperty(exports, "INVALID_TRACEID", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_TRACEID; } }); -Object.defineProperty(exports, "INVALID_SPAN_CONTEXT", { enumerable: true, get: function () { return invalid_span_constants_1.INVALID_SPAN_CONTEXT; } }); -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const context_api_1 = require("./context-api"); -Object.defineProperty(exports, "context", { enumerable: true, get: function () { return context_api_1.context; } }); -const diag_api_1 = require("./diag-api"); -Object.defineProperty(exports, "diag", { enumerable: true, get: function () { return diag_api_1.diag; } }); -const metrics_api_1 = require("./metrics-api"); -Object.defineProperty(exports, "metrics", { enumerable: true, get: function () { return metrics_api_1.metrics; } }); -const propagation_api_1 = require("./propagation-api"); -Object.defineProperty(exports, "propagation", { enumerable: true, get: function () { return propagation_api_1.propagation; } }); -const trace_api_1 = require("./trace-api"); -Object.defineProperty(exports, "trace", { enumerable: true, get: function () { return trace_api_1.trace; } }); -// Default export. 
-exports.default = { - context: context_api_1.context, - diag: diag_api_1.diag, - metrics: metrics_api_1.metrics, - propagation: propagation_api_1.propagation, - trace: trace_api_1.trace, -}; -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js b/node_modules/@opentelemetry/api/build/src/internal/global-utils.js deleted file mode 100644 index 11a1a441d..000000000 --- a/node_modules/@opentelemetry/api/build/src/internal/global-utils.js +++ /dev/null @@ -1,64 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.unregisterGlobal = exports.getGlobal = exports.registerGlobal = void 0; -const platform_1 = require("../platform"); -const version_1 = require("../version"); -const semver_1 = require("./semver"); -const major = version_1.VERSION.split('.')[0]; -const GLOBAL_OPENTELEMETRY_API_KEY = Symbol.for(`opentelemetry.js.api.${major}`); -const _global = platform_1._globalThis; -function registerGlobal(type, instance, diag, allowOverride = false) { - var _a; - const api = (_global[GLOBAL_OPENTELEMETRY_API_KEY] = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) !== null && _a !== void 0 ? 
_a : { - version: version_1.VERSION, - }); - if (!allowOverride && api[type]) { - // already registered an API of this type - const err = new Error(`@opentelemetry/api: Attempted duplicate registration of API: ${type}`); - diag.error(err.stack || err.message); - return false; - } - if (api.version !== version_1.VERSION) { - // All registered APIs must be of the same version exactly - const err = new Error(`@opentelemetry/api: Registration of version v${api.version} for ${type} does not match previously registered API v${version_1.VERSION}`); - diag.error(err.stack || err.message); - return false; - } - api[type] = instance; - diag.debug(`@opentelemetry/api: Registered a global for ${type} v${version_1.VERSION}.`); - return true; -} -exports.registerGlobal = registerGlobal; -function getGlobal(type) { - var _a, _b; - const globalVersion = (_a = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _a === void 0 ? void 0 : _a.version; - if (!globalVersion || !(0, semver_1.isCompatible)(globalVersion)) { - return; - } - return (_b = _global[GLOBAL_OPENTELEMETRY_API_KEY]) === null || _b === void 0 ? void 0 : _b[type]; -} -exports.getGlobal = getGlobal; -function unregisterGlobal(type, diag) { - diag.debug(`@opentelemetry/api: Unregistering a global for ${type} v${version_1.VERSION}.`); - const api = _global[GLOBAL_OPENTELEMETRY_API_KEY]; - if (api) { - delete api[type]; - } -} -exports.unregisterGlobal = unregisterGlobal; -//# sourceMappingURL=global-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/internal/semver.js b/node_modules/@opentelemetry/api/build/src/internal/semver.js deleted file mode 100644 index 7a073b224..000000000 --- a/node_modules/@opentelemetry/api/build/src/internal/semver.js +++ /dev/null @@ -1,122 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.isCompatible = exports._makeCompatibilityCheck = void 0; -const version_1 = require("../version"); -const re = /^(\d+)\.(\d+)\.(\d+)(-(.+))?$/; -/** - * Create a function to test an API version to see if it is compatible with the provided ownVersion. - * - * The returned function has the following semantics: - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param ownVersion version which should be checked against - */ -function _makeCompatibilityCheck(ownVersion) { - const acceptedVersions = new Set([ownVersion]); - const rejectedVersions = new Set(); - const myVersionMatch = ownVersion.match(re); - if (!myVersionMatch) { - // we cannot guarantee compatibility so we always return noop - return () => false; - } - const ownVersionParsed = { - major: +myVersionMatch[1], - minor: +myVersionMatch[2], - patch: +myVersionMatch[3], - 
prerelease: myVersionMatch[4], - }; - // if ownVersion has a prerelease tag, versions must match exactly - if (ownVersionParsed.prerelease != null) { - return function isExactmatch(globalVersion) { - return globalVersion === ownVersion; - }; - } - function _reject(v) { - rejectedVersions.add(v); - return false; - } - function _accept(v) { - acceptedVersions.add(v); - return true; - } - return function isCompatible(globalVersion) { - if (acceptedVersions.has(globalVersion)) { - return true; - } - if (rejectedVersions.has(globalVersion)) { - return false; - } - const globalVersionMatch = globalVersion.match(re); - if (!globalVersionMatch) { - // cannot parse other version - // we cannot guarantee compatibility so we always noop - return _reject(globalVersion); - } - const globalVersionParsed = { - major: +globalVersionMatch[1], - minor: +globalVersionMatch[2], - patch: +globalVersionMatch[3], - prerelease: globalVersionMatch[4], - }; - // if globalVersion has a prerelease tag, versions must match exactly - if (globalVersionParsed.prerelease != null) { - return _reject(globalVersion); - } - // major versions must match - if (ownVersionParsed.major !== globalVersionParsed.major) { - return _reject(globalVersion); - } - if (ownVersionParsed.major === 0) { - if (ownVersionParsed.minor === globalVersionParsed.minor && - ownVersionParsed.patch <= globalVersionParsed.patch) { - return _accept(globalVersion); - } - return _reject(globalVersion); - } - if (ownVersionParsed.minor <= globalVersionParsed.minor) { - return _accept(globalVersion); - } - return _reject(globalVersion); - }; -} -exports._makeCompatibilityCheck = _makeCompatibilityCheck; -/** - * Test an API version to see if it is compatible with this API. 
- * - * - Exact match is always compatible - * - Major versions must match exactly - * - 1.x package cannot use global 2.x package - * - 2.x package cannot use global 1.x package - * - The minor version of the API module requesting access to the global API must be less than or equal to the minor version of this API - * - 1.3 package may use 1.4 global because the later global contains all functions 1.3 expects - * - 1.4 package may NOT use 1.3 global because it may try to call functions which don't exist on 1.3 - * - If the major version is 0, the minor version is treated as the major and the patch is treated as the minor - * - Patch and build tag differences are not considered at this time - * - * @param version version of the API requesting an instance of the global API - */ -exports.isCompatible = _makeCompatibilityCheck(version_1.VERSION); -//# sourceMappingURL=semver.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics-api.js b/node_modules/@opentelemetry/api/build/src/metrics-api.js deleted file mode 100644 index 987f7c25a..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics-api.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.metrics = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. -const metrics_1 = require("./api/metrics"); -/** Entrypoint for metrics API */ -exports.metrics = metrics_1.MetricsAPI.getInstance(); -//# sourceMappingURL=metrics-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Meter.js b/node_modules/@opentelemetry/api/build/src/metrics/Meter.js deleted file mode 100644 index 56b930c6e..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics/Meter.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Meter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js b/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js deleted file mode 100644 index e94205e7c..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics/MeterProvider.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=MeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/Metric.js b/node_modules/@opentelemetry/api/build/src/metrics/Metric.js deleted file mode 100644 index 4966c3d51..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics/Metric.js +++ /dev/null @@ -1,25 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ValueType = void 0; -/** The Type of value. It describes how the data is reported. 
*/ -var ValueType; -(function (ValueType) { - ValueType[ValueType["INT"] = 0] = "INT"; - ValueType[ValueType["DOUBLE"] = 1] = "DOUBLE"; -})(ValueType = exports.ValueType || (exports.ValueType = {})); -//# sourceMappingURL=Metric.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js deleted file mode 100644 index 4c7c92282..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeter.js +++ /dev/null @@ -1,116 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createNoopMeter = exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = exports.NOOP_OBSERVABLE_GAUGE_METRIC = exports.NOOP_OBSERVABLE_COUNTER_METRIC = exports.NOOP_UP_DOWN_COUNTER_METRIC = exports.NOOP_HISTOGRAM_METRIC = exports.NOOP_COUNTER_METRIC = exports.NOOP_METER = exports.NoopObservableUpDownCounterMetric = exports.NoopObservableGaugeMetric = exports.NoopObservableCounterMetric = exports.NoopObservableMetric = exports.NoopHistogramMetric = exports.NoopUpDownCounterMetric = exports.NoopCounterMetric = exports.NoopMetric = exports.NoopMeter = void 0; -/** - * NoopMeter is a noop implementation of the {@link Meter} interface. It reuses - * constant NoopMetrics for all of its methods. 
- */ -class NoopMeter { - constructor() { } - /** - * @see {@link Meter.createHistogram} - */ - createHistogram(_name, _options) { - return exports.NOOP_HISTOGRAM_METRIC; - } - /** - * @see {@link Meter.createCounter} - */ - createCounter(_name, _options) { - return exports.NOOP_COUNTER_METRIC; - } - /** - * @see {@link Meter.createUpDownCounter} - */ - createUpDownCounter(_name, _options) { - return exports.NOOP_UP_DOWN_COUNTER_METRIC; - } - /** - * @see {@link Meter.createObservableGauge} - */ - createObservableGauge(_name, _options) { - return exports.NOOP_OBSERVABLE_GAUGE_METRIC; - } - /** - * @see {@link Meter.createObservableCounter} - */ - createObservableCounter(_name, _options) { - return exports.NOOP_OBSERVABLE_COUNTER_METRIC; - } - /** - * @see {@link Meter.createObservableUpDownCounter} - */ - createObservableUpDownCounter(_name, _options) { - return exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC; - } - /** - * @see {@link Meter.addBatchObservableCallback} - */ - addBatchObservableCallback(_callback, _observables) { } - /** - * @see {@link Meter.removeBatchObservableCallback} - */ - removeBatchObservableCallback(_callback) { } -} -exports.NoopMeter = NoopMeter; -class NoopMetric { -} -exports.NoopMetric = NoopMetric; -class NoopCounterMetric extends NoopMetric { - add(_value, _attributes) { } -} -exports.NoopCounterMetric = NoopCounterMetric; -class NoopUpDownCounterMetric extends NoopMetric { - add(_value, _attributes) { } -} -exports.NoopUpDownCounterMetric = NoopUpDownCounterMetric; -class NoopHistogramMetric extends NoopMetric { - record(_value, _attributes) { } -} -exports.NoopHistogramMetric = NoopHistogramMetric; -class NoopObservableMetric { - addCallback(_callback) { } - removeCallback(_callback) { } -} -exports.NoopObservableMetric = NoopObservableMetric; -class NoopObservableCounterMetric extends NoopObservableMetric { -} -exports.NoopObservableCounterMetric = NoopObservableCounterMetric; -class NoopObservableGaugeMetric extends 
NoopObservableMetric { -} -exports.NoopObservableGaugeMetric = NoopObservableGaugeMetric; -class NoopObservableUpDownCounterMetric extends NoopObservableMetric { -} -exports.NoopObservableUpDownCounterMetric = NoopObservableUpDownCounterMetric; -exports.NOOP_METER = new NoopMeter(); -// Synchronous instruments -exports.NOOP_COUNTER_METRIC = new NoopCounterMetric(); -exports.NOOP_HISTOGRAM_METRIC = new NoopHistogramMetric(); -exports.NOOP_UP_DOWN_COUNTER_METRIC = new NoopUpDownCounterMetric(); -// Asynchronous instruments -exports.NOOP_OBSERVABLE_COUNTER_METRIC = new NoopObservableCounterMetric(); -exports.NOOP_OBSERVABLE_GAUGE_METRIC = new NoopObservableGaugeMetric(); -exports.NOOP_OBSERVABLE_UP_DOWN_COUNTER_METRIC = new NoopObservableUpDownCounterMetric(); -/** - * Create a no-op Meter - */ -function createNoopMeter() { - return exports.NOOP_METER; -} -exports.createNoopMeter = createNoopMeter; -//# sourceMappingURL=NoopMeter.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js b/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js deleted file mode 100644 index b1c1cc067..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics/NoopMeterProvider.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NOOP_METER_PROVIDER = exports.NoopMeterProvider = void 0; -const NoopMeter_1 = require("./NoopMeter"); -/** - * An implementation of the {@link MeterProvider} which returns an impotent Meter - * for all calls to `getMeter` - */ -class NoopMeterProvider { - getMeter(_name, _version, _options) { - return NoopMeter_1.NOOP_METER; - } -} -exports.NoopMeterProvider = NoopMeterProvider; -exports.NOOP_METER_PROVIDER = new NoopMeterProvider(); -//# sourceMappingURL=NoopMeterProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js b/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js deleted file mode 100644 index 7e5cbd0ed..000000000 --- a/node_modules/@opentelemetry/api/build/src/metrics/ObservableResult.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=ObservableResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js deleted file mode 100644 index 15c8d21a7..000000000 --- a/node_modules/@opentelemetry/api/build/src/platform/browser/globalThis.js +++ /dev/null @@ -1,38 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports._globalThis = void 0; -// Updates to this file should also be replicated to @opentelemetry/core too. -/** - * - globalThis (New standard) - * - self (Will return the current window instance for supported browsers) - * - window (fallback for older browser implementations) - * - global (NodeJS implementation) - * - (When all else fails) - */ -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins, no-undef -exports._globalThis = typeof globalThis === 'object' - ? globalThis - : typeof self === 'object' - ? self - : typeof window === 'object' - ? window - : typeof global === 'object' - ? 
global - : {}; -//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js b/node_modules/@opentelemetry/api/build/src/platform/browser/index.js deleted file mode 100644 index 99fd57c8a..000000000 --- a/node_modules/@opentelemetry/api/build/src/platform/browser/index.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -__exportStar(require("./globalThis"), exports); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/index.js b/node_modules/@opentelemetry/api/build/src/platform/index.js deleted file mode 100644 index 33b834dbf..000000000 --- a/node_modules/@opentelemetry/api/build/src/platform/index.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -__exportStar(require("./node"), exports); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js b/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js deleted file mode 100644 index 82c4e3949..000000000 --- a/node_modules/@opentelemetry/api/build/src/platform/node/globalThis.js +++ /dev/null @@ -1,22 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports._globalThis = void 0; -/** only globals that common to node and browsers are allowed */ -// eslint-disable-next-line node/no-unsupported-features/es-builtins -exports._globalThis = typeof globalThis === 'object' ? 
globalThis : global; -//# sourceMappingURL=globalThis.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/platform/node/index.js b/node_modules/@opentelemetry/api/build/src/platform/node/index.js deleted file mode 100644 index 99fd57c8a..000000000 --- a/node_modules/@opentelemetry/api/build/src/platform/node/index.js +++ /dev/null @@ -1,29 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __exportStar = (this && this.__exportStar) || function(m, exports) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -__exportStar(require("./globalThis"), exports); -//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation-api.js b/node_modules/@opentelemetry/api/build/src/propagation-api.js deleted file mode 100644 index f014fb4a0..000000000 --- a/node_modules/@opentelemetry/api/build/src/propagation-api.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.propagation = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-const propagation_1 = require("./api/propagation"); -/** Entrypoint for propagation API */ -exports.propagation = propagation_1.PropagationAPI.getInstance(); -//# sourceMappingURL=propagation-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js deleted file mode 100644 index 3f395829c..000000000 --- a/node_modules/@opentelemetry/api/build/src/propagation/NoopTextMapPropagator.js +++ /dev/null @@ -1,34 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTextMapPropagator = void 0; -/** - * No-op implementations of {@link TextMapPropagator}. 
- */ -class NoopTextMapPropagator { - /** Noop inject function does nothing */ - inject(_context, _carrier) { } - /** Noop extract function does nothing and returns the input context */ - extract(context, _carrier) { - return context; - } - fields() { - return []; - } -} -exports.NoopTextMapPropagator = NoopTextMapPropagator; -//# sourceMappingURL=NoopTextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js b/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js deleted file mode 100644 index 513f33c62..000000000 --- a/node_modules/@opentelemetry/api/build/src/propagation/TextMapPropagator.js +++ /dev/null @@ -1,41 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.defaultTextMapSetter = exports.defaultTextMapGetter = void 0; -exports.defaultTextMapGetter = { - get(carrier, key) { - if (carrier == null) { - return undefined; - } - return carrier[key]; - }, - keys(carrier) { - if (carrier == null) { - return []; - } - return Object.keys(carrier); - }, -}; -exports.defaultTextMapSetter = { - set(carrier, key, value) { - if (carrier == null) { - return; - } - carrier[key] = value; - }, -}; -//# sourceMappingURL=TextMapPropagator.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace-api.js b/node_modules/@opentelemetry/api/build/src/trace-api.js deleted file mode 100644 index c8bbe93a8..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace-api.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.trace = void 0; -// Split module-level variable definition into separate files to allow -// tree-shaking on each api instance. 
-const trace_1 = require("./api/trace"); -/** Entrypoint for trace API */ -exports.trace = trace_1.TraceAPI.getInstance(); -//# sourceMappingURL=trace-api.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js b/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js deleted file mode 100644 index 44e913c70..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/NonRecordingSpan.js +++ /dev/null @@ -1,63 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NonRecordingSpan = void 0; -const invalid_span_constants_1 = require("./invalid-span-constants"); -/** - * The NonRecordingSpan is the default {@link Span} that is used when no Span - * implementation is available. All operations are no-op including context - * propagation. - */ -class NonRecordingSpan { - constructor(_spanContext = invalid_span_constants_1.INVALID_SPAN_CONTEXT) { - this._spanContext = _spanContext; - } - // Returns a SpanContext. 
- spanContext() { - return this._spanContext; - } - // By default does nothing - setAttribute(_key, _value) { - return this; - } - // By default does nothing - setAttributes(_attributes) { - return this; - } - // By default does nothing - addEvent(_name, _attributes) { - return this; - } - // By default does nothing - setStatus(_status) { - return this; - } - // By default does nothing - updateName(_name) { - return this; - } - // By default does nothing - end(_endTime) { } - // isRecording always returns false for NonRecordingSpan. - isRecording() { - return false; - } - // By default does nothing - recordException(_exception, _time) { } -} -exports.NonRecordingSpan = NonRecordingSpan; -//# sourceMappingURL=NonRecordingSpan.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js deleted file mode 100644 index 0a823aa56..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/NoopTracer.js +++ /dev/null @@ -1,75 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTracer = void 0; -const context_1 = require("../api/context"); -const context_utils_1 = require("../trace/context-utils"); -const NonRecordingSpan_1 = require("./NonRecordingSpan"); -const spancontext_utils_1 = require("./spancontext-utils"); -const contextApi = context_1.ContextAPI.getInstance(); -/** - * No-op implementations of {@link Tracer}. - */ -class NoopTracer { - // startSpan starts a noop span. - startSpan(name, options, context = contextApi.active()) { - const root = Boolean(options === null || options === void 0 ? void 0 : options.root); - if (root) { - return new NonRecordingSpan_1.NonRecordingSpan(); - } - const parentFromContext = context && (0, context_utils_1.getSpanContext)(context); - if (isSpanContext(parentFromContext) && - (0, spancontext_utils_1.isSpanContextValid)(parentFromContext)) { - return new NonRecordingSpan_1.NonRecordingSpan(parentFromContext); - } - else { - return new NonRecordingSpan_1.NonRecordingSpan(); - } - } - startActiveSpan(name, arg2, arg3, arg4) { - let opts; - let ctx; - let fn; - if (arguments.length < 2) { - return; - } - else if (arguments.length === 2) { - fn = arg2; - } - else if (arguments.length === 3) { - opts = arg2; - fn = arg3; - } - else { - opts = arg2; - ctx = arg3; - fn = arg4; - } - const parentContext = ctx !== null && ctx !== void 0 ? 
ctx : contextApi.active(); - const span = this.startSpan(name, opts, parentContext); - const contextWithSpanSet = (0, context_utils_1.setSpan)(parentContext, span); - return contextApi.with(contextWithSpanSet, fn, undefined, span); - } -} -exports.NoopTracer = NoopTracer; -function isSpanContext(spanContext) { - return (typeof spanContext === 'object' && - typeof spanContext['spanId'] === 'string' && - typeof spanContext['traceId'] === 'string' && - typeof spanContext['traceFlags'] === 'number'); -} -//# sourceMappingURL=NoopTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js deleted file mode 100644 index c9e08d635..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/NoopTracerProvider.js +++ /dev/null @@ -1,32 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.NoopTracerProvider = void 0; -const NoopTracer_1 = require("./NoopTracer"); -/** - * An implementation of the {@link TracerProvider} which returns an impotent - * Tracer for all calls to `getTracer`. - * - * All operations are no-op. 
- */ -class NoopTracerProvider { - getTracer(_name, _version, _options) { - return new NoopTracer_1.NoopTracer(); - } -} -exports.NoopTracerProvider = NoopTracerProvider; -//# sourceMappingURL=NoopTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js deleted file mode 100644 index 667768010..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracer.js +++ /dev/null @@ -1,55 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ProxyTracer = void 0; -const NoopTracer_1 = require("./NoopTracer"); -const NOOP_TRACER = new NoopTracer_1.NoopTracer(); -/** - * Proxy tracer provided by the proxy tracer provider - */ -class ProxyTracer { - constructor(_provider, name, version, options) { - this._provider = _provider; - this.name = name; - this.version = version; - this.options = options; - } - startSpan(name, options, context) { - return this._getTracer().startSpan(name, options, context); - } - startActiveSpan(_name, _options, _context, _fn) { - const tracer = this._getTracer(); - return Reflect.apply(tracer.startActiveSpan, tracer, arguments); - } - /** - * Try to get a tracer from the proxy tracer provider. 
- * If the proxy tracer provider has no delegate, return a noop tracer. - */ - _getTracer() { - if (this._delegate) { - return this._delegate; - } - const tracer = this._provider.getDelegateTracer(this.name, this.version, this.options); - if (!tracer) { - return NOOP_TRACER; - } - this._delegate = tracer; - return this._delegate; - } -} -exports.ProxyTracer = ProxyTracer; -//# sourceMappingURL=ProxyTracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js b/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js deleted file mode 100644 index 75ec910de..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/ProxyTracerProvider.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.ProxyTracerProvider = void 0; -const ProxyTracer_1 = require("./ProxyTracer"); -const NoopTracerProvider_1 = require("./NoopTracerProvider"); -const NOOP_TRACER_PROVIDER = new NoopTracerProvider_1.NoopTracerProvider(); -/** - * Tracer provider which provides {@link ProxyTracer}s. - * - * Before a delegate is set, tracers provided are NoOp. - * When a delegate is set, traces are provided from the delegate. 
- * When a delegate is set after tracers have already been provided, - * all tracers already provided will use the provided delegate implementation. - */ -class ProxyTracerProvider { - /** - * Get a {@link ProxyTracer} - */ - getTracer(name, version, options) { - var _a; - return ((_a = this.getDelegateTracer(name, version, options)) !== null && _a !== void 0 ? _a : new ProxyTracer_1.ProxyTracer(this, name, version, options)); - } - getDelegate() { - var _a; - return (_a = this._delegate) !== null && _a !== void 0 ? _a : NOOP_TRACER_PROVIDER; - } - /** - * Set the delegate tracer provider - */ - setDelegate(delegate) { - this._delegate = delegate; - } - getDelegateTracer(name, version, options) { - var _a; - return (_a = this._delegate) === null || _a === void 0 ? void 0 : _a.getTracer(name, version, options); - } -} -exports.ProxyTracerProvider = ProxyTracerProvider; -//# sourceMappingURL=ProxyTracerProvider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js b/node_modules/@opentelemetry/api/build/src/trace/Sampler.js deleted file mode 100644 index 6034482e4..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/Sampler.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=Sampler.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js b/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js deleted file mode 100644 index 6df6b3bf9..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/SamplingResult.js +++ /dev/null @@ -1,42 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SamplingDecision = void 0; -/** - * @deprecated use the one declared in @opentelemetry/sdk-trace-base instead. - * A sampling decision that determines how a {@link Span} will be recorded - * and collected. - */ -var SamplingDecision; -(function (SamplingDecision) { - /** - * `Span.isRecording() === false`, span will not be recorded and all events - * and attributes will be dropped. - */ - SamplingDecision[SamplingDecision["NOT_RECORD"] = 0] = "NOT_RECORD"; - /** - * `Span.isRecording() === true`, but `Sampled` flag in {@link TraceFlags} - * MUST NOT be set. - */ - SamplingDecision[SamplingDecision["RECORD"] = 1] = "RECORD"; - /** - * `Span.isRecording() === true` AND `Sampled` flag in {@link TraceFlags} - * MUST be set. 
- */ - SamplingDecision[SamplingDecision["RECORD_AND_SAMPLED"] = 2] = "RECORD_AND_SAMPLED"; -})(SamplingDecision = exports.SamplingDecision || (exports.SamplingDecision = {})); -//# sourceMappingURL=SamplingResult.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js b/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js deleted file mode 100644 index cb582305b..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/SpanOptions.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=SpanOptions.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/attributes.js b/node_modules/@opentelemetry/api/build/src/trace/attributes.js deleted file mode 100644 index c6eb97a3c..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/attributes.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=attributes.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js b/node_modules/@opentelemetry/api/build/src/trace/context-utils.js deleted file mode 100644 index d7e9c3a3b..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/context-utils.js +++ /dev/null @@ -1,82 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getSpanContext = exports.setSpanContext = exports.deleteSpan = exports.setSpan = exports.getActiveSpan = exports.getSpan = void 0; -const context_1 = require("../context/context"); -const NonRecordingSpan_1 = require("./NonRecordingSpan"); -const context_2 = require("../api/context"); -/** - * span key - */ -const SPAN_KEY = (0, context_1.createContextKey)('OpenTelemetry Context Key SPAN'); -/** - * Return the span if one exists - * - * @param context context to get span from - */ -function getSpan(context) { - return context.getValue(SPAN_KEY) || undefined; -} -exports.getSpan = getSpan; -/** - * Gets the span from the current context, if one exists. - */ -function getActiveSpan() { - return getSpan(context_2.ContextAPI.getInstance().active()); -} -exports.getActiveSpan = getActiveSpan; -/** - * Set the span on a context - * - * @param context context to use as parent - * @param span span to set active - */ -function setSpan(context, span) { - return context.setValue(SPAN_KEY, span); -} -exports.setSpan = setSpan; -/** - * Remove current span stored in the context - * - * @param context context to delete span from - */ -function deleteSpan(context) { - return context.deleteValue(SPAN_KEY); -} -exports.deleteSpan = deleteSpan; -/** - * Wrap span context in a NoopSpan and set as span in a new - * context - * - * @param context context to set active span on - * @param spanContext span context to be wrapped - */ -function setSpanContext(context, spanContext) { - return setSpan(context, new NonRecordingSpan_1.NonRecordingSpan(spanContext)); -} -exports.setSpanContext = setSpanContext; -/** - * Get the span context of the span if it exists. - * - * @param context context to get values from - */ -function getSpanContext(context) { - var _a; - return (_a = getSpan(context)) === null || _a === void 0 ? 
void 0 : _a.spanContext(); -} -exports.getSpanContext = getSpanContext; -//# sourceMappingURL=context-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js deleted file mode 100644 index 93c0289c1..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-impl.js +++ /dev/null @@ -1,103 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TraceStateImpl = void 0; -const tracestate_validators_1 = require("./tracestate-validators"); -const MAX_TRACE_STATE_ITEMS = 32; -const MAX_TRACE_STATE_LEN = 512; -const LIST_MEMBERS_SEPARATOR = ','; -const LIST_MEMBER_KEY_VALUE_SPLITTER = '='; -/** - * TraceState must be a class and not a simple object type because of the spec - * requirement (https://www.w3.org/TR/trace-context/#tracestate-field). - * - * Here is the list of allowed mutations: - * - New key-value pair should be added into the beginning of the list - * - The value of any key can be updated. Modified keys MUST be moved to the - * beginning of the list. 
- */ -class TraceStateImpl { - constructor(rawTraceState) { - this._internalState = new Map(); - if (rawTraceState) - this._parse(rawTraceState); - } - set(key, value) { - // TODO: Benchmark the different approaches(map vs list) and - // use the faster one. - const traceState = this._clone(); - if (traceState._internalState.has(key)) { - traceState._internalState.delete(key); - } - traceState._internalState.set(key, value); - return traceState; - } - unset(key) { - const traceState = this._clone(); - traceState._internalState.delete(key); - return traceState; - } - get(key) { - return this._internalState.get(key); - } - serialize() { - return this._keys() - .reduce((agg, key) => { - agg.push(key + LIST_MEMBER_KEY_VALUE_SPLITTER + this.get(key)); - return agg; - }, []) - .join(LIST_MEMBERS_SEPARATOR); - } - _parse(rawTraceState) { - if (rawTraceState.length > MAX_TRACE_STATE_LEN) - return; - this._internalState = rawTraceState - .split(LIST_MEMBERS_SEPARATOR) - .reverse() // Store in reverse so new keys (.set(...)) will be placed at the beginning - .reduce((agg, part) => { - const listMember = part.trim(); // Optional Whitespace (OWS) handling - const i = listMember.indexOf(LIST_MEMBER_KEY_VALUE_SPLITTER); - if (i !== -1) { - const key = listMember.slice(0, i); - const value = listMember.slice(i + 1, part.length); - if ((0, tracestate_validators_1.validateKey)(key) && (0, tracestate_validators_1.validateValue)(value)) { - agg.set(key, value); - } - else { - // TODO: Consider to add warning log - } - } - return agg; - }, new Map()); - // Because of the reverse() requirement, trunc must be done after map is created - if (this._internalState.size > MAX_TRACE_STATE_ITEMS) { - this._internalState = new Map(Array.from(this._internalState.entries()) - .reverse() // Use reverse same as original tracestate parse chain - .slice(0, MAX_TRACE_STATE_ITEMS)); - } - } - _keys() { - return Array.from(this._internalState.keys()).reverse(); - } - _clone() { - const traceState = new 
TraceStateImpl(); - traceState._internalState = new Map(this._internalState); - return traceState; - } -} -exports.TraceStateImpl = TraceStateImpl; -//# sourceMappingURL=tracestate-impl.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js b/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js deleted file mode 100644 index 3e370449b..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/internal/tracestate-validators.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.validateValue = exports.validateKey = void 0; -const VALID_KEY_CHAR_RANGE = '[_0-9a-z-*/]'; -const VALID_KEY = `[a-z]${VALID_KEY_CHAR_RANGE}{0,255}`; -const VALID_VENDOR_KEY = `[a-z0-9]${VALID_KEY_CHAR_RANGE}{0,240}@[a-z]${VALID_KEY_CHAR_RANGE}{0,13}`; -const VALID_KEY_REGEX = new RegExp(`^(?:${VALID_KEY}|${VALID_VENDOR_KEY})$`); -const VALID_VALUE_BASE_REGEX = /^[ -~]{0,255}[!-~]$/; -const INVALID_VALUE_COMMA_EQUAL_REGEX = /,|=/; -/** - * Key is opaque string up to 256 characters printable. It MUST begin with a - * lowercase letter, and can only contain lowercase letters a-z, digits 0-9, - * underscores _, dashes -, asterisks *, and forward slashes /. 
- * For multi-tenant vendor scenarios, an at sign (@) can be used to prefix the - * vendor name. Vendors SHOULD set the tenant ID at the beginning of the key. - * see https://www.w3.org/TR/trace-context/#key - */ -function validateKey(key) { - return VALID_KEY_REGEX.test(key); -} -exports.validateKey = validateKey; -/** - * Value is opaque string up to 256 characters printable ASCII RFC0020 - * characters (i.e., the range 0x20 to 0x7E) except comma , and =. - */ -function validateValue(value) { - return (VALID_VALUE_BASE_REGEX.test(value) && - !INVALID_VALUE_COMMA_EQUAL_REGEX.test(value)); -} -exports.validateValue = validateValue; -//# sourceMappingURL=tracestate-validators.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js b/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js deleted file mode 100644 index 3d954190b..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/internal/utils.js +++ /dev/null @@ -1,24 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.createTraceState = void 0; -const tracestate_impl_1 = require("./tracestate-impl"); -function createTraceState(rawTraceState) { - return new tracestate_impl_1.TraceStateImpl(rawTraceState); -} -exports.createTraceState = createTraceState; -//# sourceMappingURL=utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js b/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js deleted file mode 100644 index 77fb79e97..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/invalid-span-constants.js +++ /dev/null @@ -1,27 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.INVALID_SPAN_CONTEXT = exports.INVALID_TRACEID = exports.INVALID_SPANID = void 0; -const trace_flags_1 = require("./trace_flags"); -exports.INVALID_SPANID = '0000000000000000'; -exports.INVALID_TRACEID = '00000000000000000000000000000000'; -exports.INVALID_SPAN_CONTEXT = { - traceId: exports.INVALID_TRACEID, - spanId: exports.INVALID_SPANID, - traceFlags: trace_flags_1.TraceFlags.NONE, -}; -//# sourceMappingURL=invalid-span-constants.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/link.js b/node_modules/@opentelemetry/api/build/src/trace/link.js deleted file mode 100644 index 8036a6346..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/link.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=link.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span.js b/node_modules/@opentelemetry/api/build/src/trace/span.js deleted file mode 100644 index b50af4627..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/span.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=span.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_context.js b/node_modules/@opentelemetry/api/build/src/trace/span_context.js deleted file mode 100644 index 4b7976ce7..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/span_context.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=span_context.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js b/node_modules/@opentelemetry/api/build/src/trace/span_kind.js deleted file mode 100644 index 9c06e2c25..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/span_kind.js +++ /dev/null @@ -1,46 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SpanKind = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var SpanKind; -(function (SpanKind) { - /** Default value. Indicates that the span is used internally. */ - SpanKind[SpanKind["INTERNAL"] = 0] = "INTERNAL"; - /** - * Indicates that the span covers server-side handling of an RPC or other - * remote request. - */ - SpanKind[SpanKind["SERVER"] = 1] = "SERVER"; - /** - * Indicates that the span covers the client-side wrapper around an RPC or - * other remote request. - */ - SpanKind[SpanKind["CLIENT"] = 2] = "CLIENT"; - /** - * Indicates that the span describes producer sending a message to a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. 
- */ - SpanKind[SpanKind["PRODUCER"] = 3] = "PRODUCER"; - /** - * Indicates that the span describes consumer receiving a message from a - * broker. Unlike client and server, there is no direct critical path latency - * relationship between producer and consumer spans. - */ - SpanKind[SpanKind["CONSUMER"] = 4] = "CONSUMER"; -})(SpanKind = exports.SpanKind || (exports.SpanKind = {})); -//# sourceMappingURL=span_kind.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js b/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js deleted file mode 100644 index dc88f5e6f..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/spancontext-utils.js +++ /dev/null @@ -1,49 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.wrapSpanContext = exports.isSpanContextValid = exports.isValidSpanId = exports.isValidTraceId = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -const invalid_span_constants_1 = require("./invalid-span-constants"); -const NonRecordingSpan_1 = require("./NonRecordingSpan"); -const VALID_TRACEID_REGEX = /^([0-9a-f]{32})$/i; -const VALID_SPANID_REGEX = /^[0-9a-f]{16}$/i; -function isValidTraceId(traceId) { - return VALID_TRACEID_REGEX.test(traceId) && traceId !== invalid_span_constants_1.INVALID_TRACEID; -} -exports.isValidTraceId = isValidTraceId; -function isValidSpanId(spanId) { - return VALID_SPANID_REGEX.test(spanId) && spanId !== invalid_span_constants_1.INVALID_SPANID; -} -exports.isValidSpanId = isValidSpanId; -/** - * Returns true if this {@link SpanContext} is valid. - * @return true if this {@link SpanContext} is valid. - */ -function isSpanContextValid(spanContext) { - return (isValidTraceId(spanContext.traceId) && isValidSpanId(spanContext.spanId)); -} -exports.isSpanContextValid = isSpanContextValid; -/** - * Wrap the given {@link SpanContext} in a new non-recording {@link Span} - * - * @param spanContext span context to be wrapped - * @returns a new non-recording {@link Span} with the provided context - */ -function wrapSpanContext(spanContext) { - return new NonRecordingSpan_1.NonRecordingSpan(spanContext); -} -exports.wrapSpanContext = wrapSpanContext; -//# sourceMappingURL=spancontext-utils.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/status.js b/node_modules/@opentelemetry/api/build/src/trace/status.js deleted file mode 100644 index 50cbdef8e..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/status.js +++ /dev/null @@ -1,23 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.SpanStatusCode = void 0; -/** - * An enumeration of status codes. - */ -var SpanStatusCode; -(function (SpanStatusCode) { - /** - * The default status. 
- */ - SpanStatusCode[SpanStatusCode["UNSET"] = 0] = "UNSET"; - /** - * The operation has been validated by an Application developer or - * Operator to have completed successfully. - */ - SpanStatusCode[SpanStatusCode["OK"] = 1] = "OK"; - /** - * The operation contains an error. - */ - SpanStatusCode[SpanStatusCode["ERROR"] = 2] = "ERROR"; -})(SpanStatusCode = exports.SpanStatusCode || (exports.SpanStatusCode = {})); -//# sourceMappingURL=status.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js b/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js deleted file mode 100644 index f8d4dd8a9..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/trace_flags.js +++ /dev/null @@ -1,26 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.TraceFlags = void 0; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -var TraceFlags; -(function (TraceFlags) { - /** Represents no flag set. */ - TraceFlags[TraceFlags["NONE"] = 0] = "NONE"; - /** Bit to represent whether trace is sampled in trace flags. 
*/ - TraceFlags[TraceFlags["SAMPLED"] = 1] = "SAMPLED"; -})(TraceFlags = exports.TraceFlags || (exports.TraceFlags = {})); -//# sourceMappingURL=trace_flags.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js b/node_modules/@opentelemetry/api/build/src/trace/trace_state.js deleted file mode 100644 index 139703815..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/trace_state.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=trace_state.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer.js b/node_modules/@opentelemetry/api/build/src/trace/tracer.js deleted file mode 100644 index d710ef9ac..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/tracer.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=tracer.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js deleted file mode 100644 index 3547251a2..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/tracer_options.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=tracer_options.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js b/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js deleted file mode 100644 index 4c511db95..000000000 --- a/node_modules/@opentelemetry/api/build/src/trace/tracer_provider.js +++ /dev/null @@ -1,18 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -//# sourceMappingURL=tracer_provider.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/build/src/version.js b/node_modules/@opentelemetry/api/build/src/version.js deleted file mode 100644 index c36077498..000000000 --- a/node_modules/@opentelemetry/api/build/src/version.js +++ /dev/null @@ -1,21 +0,0 @@ -"use strict"; -/* - * Copyright The OpenTelemetry Authors - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
- * See the License for the specific language governing permissions and - * limitations under the License. - */ -Object.defineProperty(exports, "__esModule", { value: true }); -exports.VERSION = void 0; -// this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.8.0'; -//# sourceMappingURL=version.js.map \ No newline at end of file diff --git a/node_modules/@opentelemetry/api/package.json b/node_modules/@opentelemetry/api/package.json deleted file mode 100644 index 081d2e02e..000000000 --- a/node_modules/@opentelemetry/api/package.json +++ /dev/null @@ -1,113 +0,0 @@ -{ - "name": "@opentelemetry/api", - "version": "1.8.0", - "description": "Public API for OpenTelemetry", - "main": "build/src/index.js", - "module": "build/esm/index.js", - "esnext": "build/esnext/index.js", - "types": "build/src/index.d.ts", - "browser": { - "./src/platform/index.ts": "./src/platform/browser/index.ts", - "./build/esm/platform/index.js": "./build/esm/platform/browser/index.js", - "./build/esnext/platform/index.js": "./build/esnext/platform/browser/index.js", - "./build/src/platform/index.js": "./build/src/platform/browser/index.js" - }, - "exports": { - ".": { - "module": "./build/esm/index.js", - "esnext": "./build/esnext/index.js", - "types": "./build/src/index.d.ts", - "default": "./build/src/index.js" - }, - "./experimental": { - "module": "./build/esm/experimental/index.js", - "esnext": "./build/esnext/experimental/index.js", - "types": "./build/src/experimental/index.d.ts", - "default": "./build/src/experimental/index.js" - } - }, - "repository": "open-telemetry/opentelemetry-js", - "scripts": { - "clean": "tsc --build --clean tsconfig.json tsconfig.esm.json tsconfig.esnext.json", - "codecov:browser": "nyc report --reporter=json && codecov -f coverage/*.json -p ../", - "codecov:webworker": "nyc report --reporter=json && codecov -f coverage/*.json -p ../", - "codecov": "nyc report --reporter=json && codecov -f coverage/*.json -p ../", - "precompile": 
"cross-var lerna run version --scope $npm_package_name --include-dependencies", - "compile": "tsc --build tsconfig.json tsconfig.esm.json tsconfig.esnext.json", - "docs": "typedoc", - "docs:deploy": "gh-pages --dist docs/out", - "docs:test": "linkinator docs/out --silent && linkinator docs/*.md *.md --markdown --silent", - "lint:fix": "eslint . --ext .ts --fix", - "lint": "eslint . --ext .ts", - "test:browser": "karma start --single-run", - "test": "nyc ts-mocha -p tsconfig.json 'test/**/*.test.ts'", - "test:eol": "ts-mocha -p tsconfig.json 'test/**/*.test.ts'", - "test:webworker": "karma start karma.worker.js --single-run", - "cycle-check": "dpdm --exit-code circular:1 src/index.ts", - "version": "node ../scripts/version-update.js", - "prewatch": "npm run precompile", - "watch": "tsc --build --watch tsconfig.json tsconfig.esm.json tsconfig.esnext.json", - "peer-api-check": "node ../scripts/peer-api-check.js" - }, - "keywords": [ - "opentelemetry", - "nodejs", - "browser", - "tracing", - "profiling", - "stats", - "monitoring" - ], - "author": "OpenTelemetry Authors", - "license": "Apache-2.0", - "engines": { - "node": ">=8.0.0" - }, - "files": [ - "build/esm/**/*.js", - "build/esm/**/*.js.map", - "build/esm/**/*.d.ts", - "build/esnext/**/*.js", - "build/esnext/**/*.js.map", - "build/esnext/**/*.d.ts", - "build/src/**/*.js", - "build/src/**/*.js.map", - "build/src/**/*.d.ts", - "LICENSE", - "README.md" - ], - "publishConfig": { - "access": "public" - }, - "devDependencies": { - "@types/mocha": "10.0.6", - "@types/node": "18.6.5", - "@types/sinon": "10.0.20", - "@types/webpack": "5.28.5", - "@types/webpack-env": "1.16.3", - "babel-plugin-istanbul": "6.1.1", - "codecov": "3.8.3", - "cross-var": "1.1.0", - "dpdm": "3.13.1", - "karma": "6.4.2", - "karma-chrome-launcher": "3.1.0", - "karma-coverage": "2.2.1", - "karma-mocha": "2.0.1", - "karma-mocha-webworker": "1.3.0", - "karma-spec-reporter": "0.0.36", - "karma-webpack": "4.0.2", - "lerna": "6.6.2", - "memfs": "3.5.3", 
- "mocha": "10.2.0", - "nyc": "15.1.0", - "sinon": "15.1.2", - "ts-loader": "8.4.0", - "ts-mocha": "10.0.0", - "typescript": "4.4.4", - "unionfs": "4.5.1", - "webpack": "5.89.0" - }, - "homepage": "https://github.com/open-telemetry/opentelemetry-js/tree/main/api", - "sideEffects": false, - "gitHead": "7be35c7845e206b27b682e8ce1cee850b09cec04" -} diff --git a/node_modules/@paulirish/trace_engine/analyze-trace.mjs b/node_modules/@paulirish/trace_engine/analyze-trace.mjs new file mode 100644 index 000000000..b17b7c716 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/analyze-trace.mjs @@ -0,0 +1,192 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. + +// Run this first: +// front_end/models/trace/build-trace-engine-lib.sh + +/* eslint-disable rulesdir/es_modules_import */ +import fs from 'node:fs'; +import zlib from 'node:zlib'; + +/** @typedef {import('../front_end/models/trace/trace.ts')} TraceEngine */ + +// For types... 
see Connor's manual hack here: +// https://github.com/GoogleChrome/lighthouse/pull/15703/files#diff-ec7e073cf0e6135d4f2af9bc04fe6100ec0df80ad1686bee2da53871be5f1a7b +// and https://github.com/GoogleChrome/lighthouse/pull/15703/files#diff-6dab4507247217209f5ab0f6c343ca2b00af1300878abba81fb74d51cdfbedf9 + +/** @type {TraceEngine} */ +import * as TraceEngine from './models/trace/trace.js'; + +polyfillDOMRect(); + +/** + * @param {string} filename + * @returns {Promise} + */ +export async function analyzeTrace(filename) { + const traceEvents = loadTraceEventsFromFile(filename); + const model = TraceEngine.TraceModel.Model.createWithAllHandlers( + TraceEngine.Types.Configuration.DEFAULT); // aka `fullTraceEngine` + await model.parse(traceEvents); + return model.traceParsedData(); +} + +// If run as CLI, parse the argv trace (or a fallback) +if (import.meta.url.endsWith(process?.argv[1])) { + cli(); +} + +async function cli() { + const filename = process.argv.at(2); + const TraceEngine = await analyzeTrace(filename); + console.log(TraceEngine); +} + + +/** + * @param {string=} filename + * @returns TraceEvent[] + */ +function loadTraceEventsFromFile(filename) { + const fileBuf = fs.readFileSync(filename); + let data; + if (isGzip(fileBuf)) { + data = zlib.gunzipSync(fileBuf); + } else { + data = fileBuf.toString('utf8'); + } + const json = JSON.parse(data); + const traceEvents = json.traceEvents ?? 
json; + console.assert(Array.isArray(traceEvents)); + return traceEvents; +} + +/** + * Read the first 3 bytes looking for the gzip signature in the file header + * https://www.rfc-editor.org/rfc/rfc1952#page-6 + * @param {ArrayBuffer} ab + * @returns boolean + */ +function isGzip(ab) { + const buf = new Uint8Array(ab); + if (!buf || buf.length < 3) { + return false; + } + return buf[0] === 0x1F && buf[1] === 0x8B && buf[2] === 0x08; +} + +export function polyfillDOMRect() { + // devtools assumes clientside :( + + // Everything else in here is the DOMRect polyfill + // https://raw.githubusercontent.com/JakeChampion/polyfill-library/master/polyfills/DOMRect/polyfill.js + + (function(global) { + function number(v) { + return v === undefined ? 0 : Number(v); + } + + function different(u, v) { + return u !== v && !(isNaN(u) && isNaN(v)); + } + + function DOMRect(xArg, yArg, wArg, hArg) { + let x, y, width, height, left, right, top, bottom; + + x = number(xArg); + y = number(yArg); + width = number(wArg); + height = number(hArg); + + Object.defineProperties(this, { + x: { + get: function() { + return x; + }, + set: function(newX) { + if (different(x, newX)) { + x = newX; + left = right = undefined; + } + }, + enumerable: true + }, + y: { + get: function() { + return y; + }, + set: function(newY) { + if (different(y, newY)) { + y = newY; + top = bottom = undefined; + } + }, + enumerable: true + }, + width: { + get: function() { + return width; + }, + set: function(newWidth) { + if (different(width, newWidth)) { + width = newWidth; + left = right = undefined; + } + }, + enumerable: true + }, + height: { + get: function() { + return height; + }, + set: function(newHeight) { + if (different(height, newHeight)) { + height = newHeight; + top = bottom = undefined; + } + }, + enumerable: true + }, + left: { + get: function() { + if (left === undefined) { + left = x + Math.min(0, width); + } + return left; + }, + enumerable: true + }, + right: { + get: function() { + if (right 
=== undefined) { + right = x + Math.max(0, width); + } + return right; + }, + enumerable: true + }, + top: { + get: function() { + if (top === undefined) { + top = y + Math.min(0, height); + } + return top; + }, + enumerable: true + }, + bottom: { + get: function() { + if (bottom === undefined) { + bottom = y + Math.max(0, height); + } + return bottom; + }, + enumerable: true + } + }); + } + + globalThis.DOMRect = DOMRect; + })(globalThis); +} diff --git a/node_modules/@paulirish/trace_engine/core/platform/ArrayUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/ArrayUtilities.js new file mode 100644 index 000000000..e49ad792e --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/ArrayUtilities.js @@ -0,0 +1,199 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export const removeElement = (array, element, firstOnly) => { + let index = array.indexOf(element); + if (index === -1) { + return false; + } + if (firstOnly) { + array.splice(index, 1); + return true; + } + for (let i = index + 1, n = array.length; i < n; ++i) { + if (array[i] !== element) { + array[index++] = array[i]; + } + } + array.length = index; + return true; +}; +function swap(array, i1, i2) { + const temp = array[i1]; + array[i1] = array[i2]; + array[i2] = temp; +} +function partition(array, comparator, left, right, pivotIndex) { + const pivotValue = array[pivotIndex]; + swap(array, right, pivotIndex); + let storeIndex = left; + for (let i = left; i < right; ++i) { + if (comparator(array[i], pivotValue) < 0) { + swap(array, storeIndex, i); + ++storeIndex; + } + } + swap(array, right, storeIndex); + return storeIndex; +} +function quickSortRange(array, comparator, left, right, sortWindowLeft, sortWindowRight) { + if (right <= left) { + return; + } + const pivotIndex = Math.floor(Math.random() * (right - left)) + left; + const pivotNewIndex = 
partition(array, comparator, left, right, pivotIndex); + if (sortWindowLeft < pivotNewIndex) { + quickSortRange(array, comparator, left, pivotNewIndex - 1, sortWindowLeft, sortWindowRight); + } + if (pivotNewIndex < sortWindowRight) { + quickSortRange(array, comparator, pivotNewIndex + 1, right, sortWindowLeft, sortWindowRight); + } +} +export function sortRange(array, comparator, leftBound, rightBound, sortWindowLeft, sortWindowRight) { + if (leftBound === 0 && rightBound === (array.length - 1) && sortWindowLeft === 0 && sortWindowRight >= rightBound) { + array.sort(comparator); + } + else { + quickSortRange(array, comparator, leftBound, rightBound, sortWindowLeft, sortWindowRight); + } + return array; +} +export const binaryIndexOf = (array, value, comparator) => { + const index = lowerBound(array, value, comparator); + return index < array.length && comparator(value, array[index]) === 0 ? index : -1; +}; +function mergeOrIntersect(array1, array2, comparator, mergeNotIntersect) { + const result = []; + let i = 0; + let j = 0; + while (i < array1.length && j < array2.length) { + const compareValue = comparator(array1[i], array2[j]); + if (mergeNotIntersect || !compareValue) { + result.push(compareValue <= 0 ? array1[i] : array2[j]); + } + if (compareValue <= 0) { + i++; + } + if (compareValue >= 0) { + j++; + } + } + if (mergeNotIntersect) { + while (i < array1.length) { + result.push(array1[i++]); + } + while (j < array2.length) { + result.push(array2[j++]); + } + } + return result; +} +export const intersectOrdered = (array1, array2, comparator) => { + return mergeOrIntersect(array1, array2, comparator, false); +}; +export const mergeOrdered = (array1, array2, comparator) => { + return mergeOrIntersect(array1, array2, comparator, true); +}; +export const DEFAULT_COMPARATOR = (a, b) => { + return a < b ? -1 : (a > b ? 1 : 0); +}; +export function lowerBound(array, needle, comparator, left, right) { + let l = left || 0; + let r = right !== undefined ? 
right : array.length; + while (l < r) { + const m = (l + r) >> 1; + if (comparator(needle, array[m]) > 0) { + l = m + 1; + } + else { + r = m; + } + } + return r; +} +export function upperBound(array, needle, comparator, left, right) { + let l = left || 0; + let r = right !== undefined ? right : array.length; + while (l < r) { + const m = (l + r) >> 1; + if (comparator(needle, array[m]) >= 0) { + l = m + 1; + } + else { + r = m; + } + } + return r; +} +/** + * Obtains the first or last item in the array that satisfies the predicate function. + * So, for example, if the array were arr = [2, 4, 6, 8, 10], and you are looking for + * the last item arr[i] such that arr[i] < 5 you would be returned 1, because + * array[1] is 4, the last item in the array that satisfies the + * predicate function. + * + * If instead you were looking for the first item in the same array that satisfies + * arr[i] > 5 you would be returned 2 because array[2] = 6. + * + * Please note: this presupposes that the array is already ordered. + */ +function nearestIndex(arr, predicate, searchStart) { + const searchFromEnd = searchStart === "END" /* NearestSearchStart.END */; + if (arr.length === 0) { + return null; + } + let left = 0; + let right = arr.length - 1; + let pivot = 0; + let matchesPredicate = false; + let moveToTheRight = false; + let middle = 0; + do { + middle = left + (right - left) / 2; + pivot = searchFromEnd ? Math.ceil(middle) : Math.floor(middle); + matchesPredicate = predicate(arr[pivot]); + moveToTheRight = matchesPredicate === searchFromEnd; + if (moveToTheRight) { + left = Math.min(right, pivot + (left === pivot ? 1 : 0)); + } + else { + right = Math.max(left, pivot + (right === pivot ? -1 : 0)); + } + } while (right !== left); + // Special-case: the indexed item doesn't pass the predicate. This + // occurs when none of the items in the array are a match for the + // predicate. 
+ if (!predicate(arr[left])) { + return null; + } + return left; +} +/** + * Obtains the first item in the array that satisfies the predicate function. + * So, for example, if the array was arr = [2, 4, 6, 8, 10], and you are looking for + * the first item arr[i] such that arr[i] > 5 you would be returned 2, because + * array[2] is 6, the first item in the array that satisfies the + * predicate function. + * + * Please note: this presupposes that the array is already ordered. + */ +export function nearestIndexFromBeginning(arr, predicate) { + return nearestIndex(arr, predicate, "BEGINNING" /* NearestSearchStart.BEGINNING */); +} +/** + * Obtains the last item in the array that satisfies the predicate function. + * So, for example, if the array was arr = [2, 4, 6, 8, 10], and you are looking for + * the last item arr[i] such that arr[i] < 5 you would be returned 1, because + * arr[1] is 4, the last item in the array that satisfies the + * predicate function. + * + * Please note: this presupposes that the array is already ordered. + */ +export function nearestIndexFromEnd(arr, predicate) { + return nearestIndex(arr, predicate, "END" /* NearestSearchStart.END */); +} +// Type guard for ensuring that `arr` does not contain null or undefined +export function arrayDoesNotContainNullOrUndefined(arr) { + return !arr.includes(null) && !arr.includes(undefined); +} +//# sourceMappingURL=ArrayUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/Brand.js b/node_modules/@paulirish/trace_engine/core/platform/Brand.js new file mode 100644 index 000000000..817c8c979 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/Brand.js @@ -0,0 +1,5 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export {}; +//# sourceMappingURL=Brand.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/DOMUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/DOMUtilities.js new file mode 100644 index 000000000..ed32e1f0f --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/DOMUtilities.js @@ -0,0 +1,109 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +/** + * `document.activeElement` will not enter shadow roots to find the element + * that has focus; use this method if you need to traverse through any shadow + * roots to find the actual, specific focused element. + */ +export function deepActiveElement(doc) { + let activeElement = doc.activeElement; + while (activeElement && activeElement.shadowRoot && activeElement.shadowRoot.activeElement) { + activeElement = activeElement.shadowRoot.activeElement; + } + return activeElement; +} +export function getEnclosingShadowRootForNode(node) { + let parentNode = node.parentNodeOrShadowHost(); + while (parentNode) { + if (parentNode instanceof ShadowRoot) { + return parentNode; + } + parentNode = parentNode.parentNodeOrShadowHost(); + } + return null; +} +export function rangeOfWord(rootNode, offset, stopCharacters, stayWithinNode, direction) { + let startNode; + let startOffset = 0; + let endNode; + let endOffset = 0; + if (!stayWithinNode) { + stayWithinNode = rootNode; + } + if (!direction || direction === 'backward' || direction === 'both') { + let node = rootNode; + while (node) { + if (node === stayWithinNode) { + if (!startNode) { + startNode = stayWithinNode; + } + break; + } + if (node.nodeType === Node.TEXT_NODE && node.nodeValue !== null) { + const start = (node === rootNode ? 
(offset - 1) : (node.nodeValue.length - 1)); + for (let i = start; i >= 0; --i) { + if (stopCharacters.indexOf(node.nodeValue[i]) !== -1) { + startNode = node; + startOffset = i + 1; + break; + } + } + } + if (startNode) { + break; + } + node = node.traversePreviousNode(stayWithinNode); + } + if (!startNode) { + startNode = stayWithinNode; + startOffset = 0; + } + } + else { + startNode = rootNode; + startOffset = offset; + } + if (!direction || direction === 'forward' || direction === 'both') { + let node = rootNode; + while (node) { + if (node === stayWithinNode) { + if (!endNode) { + endNode = stayWithinNode; + } + break; + } + if (node.nodeType === Node.TEXT_NODE && node.nodeValue !== null) { + const start = (node === rootNode ? offset : 0); + for (let i = start; i < node.nodeValue.length; ++i) { + if (stopCharacters.indexOf(node.nodeValue[i]) !== -1) { + endNode = node; + endOffset = i; + break; + } + } + } + if (endNode) { + break; + } + node = node.traverseNextNode(stayWithinNode); + } + if (!endNode) { + endNode = stayWithinNode; + endOffset = stayWithinNode.nodeType === Node.TEXT_NODE ? stayWithinNode.nodeValue?.length || 0 : + stayWithinNode.childNodes.length; + } + } + else { + endNode = rootNode; + endOffset = offset; + } + if (!rootNode.ownerDocument) { + throw new Error('No `ownerDocument` found for rootNode'); + } + const result = rootNode.ownerDocument.createRange(); + result.setStart(startNode, startOffset); + result.setEnd(endNode, endOffset); + return result; +} +//# sourceMappingURL=DOMUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/DateUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/DateUtilities.js new file mode 100644 index 000000000..34723f0fa --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/DateUtilities.js @@ -0,0 +1,14 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export const isValid = (date) => { + return !isNaN(date.getTime()); +}; +export const toISO8601Compact = (date) => { + function leadZero(x) { + return (x > 9 ? '' : '0') + x; + } + return date.getFullYear() + leadZero(date.getMonth() + 1) + leadZero(date.getDate()) + 'T' + + leadZero(date.getHours()) + leadZero(date.getMinutes()) + leadZero(date.getSeconds()); +}; +//# sourceMappingURL=DateUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/DevToolsPath.js b/node_modules/@paulirish/trace_engine/core/platform/DevToolsPath.js new file mode 100644 index 000000000..5fcefb583 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/DevToolsPath.js @@ -0,0 +1,7 @@ +// Copyright 2021 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export const EmptyUrlString = ''; +export const EmptyRawPathString = ''; +export const EmptyEncodedPathString = ''; +//# sourceMappingURL=DevToolsPath.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/KeyboardUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/KeyboardUtilities.js new file mode 100644 index 000000000..d1073a651 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/KeyboardUtilities.js @@ -0,0 +1,22 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export const ENTER_KEY = 'Enter'; +export const ESCAPE_KEY = 'Escape'; +export const TAB_KEY = 'Tab'; +export const ARROW_KEYS = new Set([ + "ArrowUp" /* ArrowKey.UP */, + "ArrowDown" /* ArrowKey.DOWN */, + "ArrowLeft" /* ArrowKey.LEFT */, + "ArrowRight" /* ArrowKey.RIGHT */, +]); +export function keyIsArrowKey(key) { + return ARROW_KEYS.has(key); +} +export function isEscKey(event) { + return event.key === 'Escape'; +} +export function isEnterOrSpaceKey(event) { + return event.key === 'Enter' || event.key === ' '; +} +//# sourceMappingURL=KeyboardUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/MapUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/MapUtilities.js new file mode 100644 index 000000000..72d9be90b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/MapUtilities.js @@ -0,0 +1,79 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export const inverse = function (map) { + const result = new Multimap(); + for (const [key, value] of map.entries()) { + result.set(value, key); + } + return result; +}; +export class Multimap { + map = new Map(); + set(key, value) { + let set = this.map.get(key); + if (!set) { + set = new Set(); + this.map.set(key, set); + } + set.add(value); + } + get(key) { + return this.map.get(key) || new Set(); + } + has(key) { + return this.map.has(key); + } + hasValue(key, value) { + const set = this.map.get(key); + if (!set) { + return false; + } + return set.has(value); + } + get size() { + return this.map.size; + } + delete(key, value) { + const values = this.get(key); + if (!values) { + return false; + } + const result = values.delete(value); + if (!values.size) { + this.map.delete(key); + } + return result; + } + deleteAll(key) { + this.map.delete(key); + } + keysArray() { + return [...this.map.keys()]; + } + keys() { + return this.map.keys(); + } + valuesArray() { + const result = []; + for (const set of this.map.values()) { + result.push(...set.values()); + } + return result; + } + clear() { + this.map.clear(); + } +} +/** + * Gets value for key, assigning a default if value is falsy. + */ +export function getWithDefault(map, key, defaultValueFactory) { + let value = map.get(key); + if (!value) { + value = defaultValueFactory(key); + map.set(key, value); + } + return value; +} +//# sourceMappingURL=MapUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/MimeType.js b/node_modules/@paulirish/trace_engine/core/platform/MimeType.js new file mode 100644 index 000000000..0620c11cc --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/MimeType.js @@ -0,0 +1,138 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+/** + * MIME types other than the ones with the "text" type that have text content. + */ +const ADDITIONAL_TEXT_MIME_TYPES = new Set([ + 'application/ecmascript', + 'application/javascript', + 'application/json', + 'application/json+protobuf', + 'application/vnd.dart', + 'application/xml', + 'application/x-aspx', + 'application/x-javascript', + 'application/x-jsp', + 'application/x-httpd-php', +]); +/** + * @returns true iff `mimeType` has textual content. Concretely we return true if: + * - `mimeType` starts with "text/" + * - `mimeType` ends with "+json" or "+xml" + * - if `mimeType` is one of a predefined list textual mime types. + */ +export function isTextType(mimeType) { + return mimeType.startsWith('text/') || mimeType.endsWith('+json') || mimeType.endsWith('+xml') || + ADDITIONAL_TEXT_MIME_TYPES.has(mimeType); +} +/** + * Port of net::HttpUtils::ParseContentType to extract mimeType and charset from + * the 'Content-Type' header. + */ +export function parseContentType(contentType) { + if (contentType === '*/*') { + return { mimeType: null, charset: null }; + } + const { mimeType, params } = parseMimeType(contentType); + const charset = params.get('charset')?.toLowerCase().trim() ?? null; + return { mimeType, charset }; +} +function parseMimeType(contentType) { + // Remove any leading and trailing whitespace. Note that String.prototype.trim removes a lot more + // than what the spec considers whitespace. We are fine with that. + contentType = contentType.trim(); + // The mimetype is basically everything until the first ';' (but trimmed). + let mimeTypeEnd = findFirstIndexOf(contentType, ' \t;('); + if (mimeTypeEnd < 0) { + mimeTypeEnd = contentType.length; + } + const slashPos = contentType.indexOf('/'); + if (slashPos < 0 || slashPos > mimeTypeEnd) { + return { mimeType: null, params: new Map() }; + } + const mimeType = contentType.substring(0, mimeTypeEnd).toLowerCase(); + // Iterate over parameters. 
We can't split the string around semicolons because quoted + // strings may include semicolons. + const params = new Map(); + let offset = contentType.indexOf(';', mimeTypeEnd); + while (offset >= 0 && offset < contentType.length) { + // Trim off the semicolon. + ++offset; + // Trim off whitespace + offset = findFirstIndexNotOf(contentType, ' \t', offset); + if (offset < 0) { + continue; + } + const paramNameStart = offset; + // Extend parameter name until we run into semicolon or equals sign. + offset = findFirstIndexOf(contentType, ';=', offset); + if (offset < 0 || contentType[offset] === ';') { + // Nothing more to do if no more input or there is no parameter value. + continue; + } + const paramName = contentType.substring(paramNameStart, offset).toLowerCase(); + // Trim off the '='. + ++offset; + // Trim off whitespace. + offset = findFirstIndexNotOf(contentType, ' \t', offset); + let paramValue = ''; + if (offset < 0 || contentType[offset] === ';') { + // Nothing to do here: the value is an unquoted string of only whitespace. + continue; + } + else if (contentType[offset] !== '"') { + // Not a quote so we can copy the value as-is. + const valueStart = offset; + offset = contentType.indexOf(';', offset); + const valueEnd = offset >= 0 ? offset : contentType.length; + paramValue = contentType.substring(valueStart, valueEnd).trimEnd(); + } + else { + // Otherwise append data with special handling for backslashes, until a close quote. + // Don't trim whitespace for quoted strings. + // Trim off the opening quote '"' + ++offset; + while (offset < contentType.length && contentType[offset] !== '"') { + // Skip over backslash and append the next character, when not at the end + // of the string. Otherwise, copy the next character (which may be a backslash). 
+ if (contentType[offset] === '\\' && offset + 1 < contentType.length) { + ++offset; + } + paramValue += contentType[offset]; + ++offset; + } + offset = contentType.indexOf(';', offset); + } + if (!params.has(paramName)) { + // The first one wins! + params.set(paramName, paramValue); + } + } + return { mimeType, params }; +} +/** + * @returns the smallest index of any character in 'characters' or -1 if none of + * the characters occur in 'searchString' + */ +function findFirstIndexOf(searchString, characters, pos = 0) { + for (let i = pos; i < searchString.length; i++) { + if (characters.includes(searchString[i])) { + return i; + } + } + return -1; +} +/** + * @returns the smallest index of any character not in 'characters' or -1 if only + * 'characters' occur in 'searchString' + */ +function findFirstIndexNotOf(searchString, characters, pos = 0) { + for (let i = pos; i < searchString.length; i++) { + if (!characters.includes(searchString[i])) { + return i; + } + } + return -1; +} +//# sourceMappingURL=MimeType.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/NumberUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/NumberUtilities.js new file mode 100644 index 000000000..2b1a8906d --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/NumberUtilities.js @@ -0,0 +1,82 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export const clamp = (num, min, max) => { + let clampedNumber = num; + if (num < min) { + clampedNumber = min; + } + else if (num > max) { + clampedNumber = max; + } + return clampedNumber; +}; +export const mod = (m, n) => { + return ((m % n) + n) % n; +}; +export const bytesToString = (bytes) => { + if (bytes < 1000) { + return `${bytes.toFixed(0)}\xA0B`; + } + const kilobytes = bytes / 1000; + if (kilobytes < 100) { + return `${kilobytes.toFixed(1)}\xA0kB`; + } + if (kilobytes < 1000) { + return `${kilobytes.toFixed(0)}\xA0kB`; + } + const megabytes = kilobytes / 1000; + if (megabytes < 100) { + return `${megabytes.toFixed(1)}\xA0MB`; + } + return `${megabytes.toFixed(0)}\xA0MB`; +}; +export const toFixedIfFloating = (value) => { + if (!value || Number.isNaN(Number(value))) { + return value; + } + const number = Number(value); + return number % 1 ? number.toFixed(3) : String(number); +}; +/** + * Rounds a number (including float) down. + */ +export const floor = (value, precision = 0) => { + const mult = Math.pow(10, precision); + return Math.floor(value * mult) / mult; +}; +/** + * Computes the great common divisor for two numbers. + * If the numbers are floats, they will be rounded to an integer. 
+ */ +export const greatestCommonDivisor = (a, b) => { + a = Math.round(a); + b = Math.round(b); + while (b !== 0) { + const t = b; + b = a % b; + a = t; + } + return a; +}; +const commonRatios = new Map([ + ['8∶5', '16∶10'], +]); +export const aspectRatio = (width, height) => { + const divisor = greatestCommonDivisor(width, height); + if (divisor !== 0) { + width /= divisor; + height /= divisor; + } + const result = `${width}∶${height}`; + return commonRatios.get(result) || result; +}; +export const withThousandsSeparator = function (num) { + let str = String(num); + const re = /(\d+)(\d{3})/; + while (str.match(re)) { + str = str.replace(re, '$1\xA0$2'); + } // \xa0 is a non-breaking space + return str; +}; +//# sourceMappingURL=NumberUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/PromiseUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/PromiseUtilities.js new file mode 100644 index 000000000..d860f5fca --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/PromiseUtilities.js @@ -0,0 +1,18 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +/** + * Returns a new pending promise together with it's resolve and reject functions. + * + * Polyfill for https://github.com/tc39/proposal-promise-with-resolvers. 
+ */ +export function promiseWithResolvers() { + let resolve; + let reject; + const promise = new Promise((res, rej) => { + resolve = res; + reject = rej; + }); + return { promise, resolve, reject }; +} +//# sourceMappingURL=PromiseUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/SetUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/SetUtilities.js new file mode 100644 index 000000000..f936980da --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/SetUtilities.js @@ -0,0 +1,23 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export const addAll = function (set, iterable) { + for (const item of iterable) { + set.add(item); + } +}; +export const isEqual = function (setA, setB) { + if (setA === setB) { + return true; + } + if (setA.size !== setB.size) { + return false; + } + for (const item of setA) { + if (!setB.has(item)) { + return false; + } + } + return true; +}; +//# sourceMappingURL=SetUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/StringUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/StringUtilities.js new file mode 100644 index 000000000..aaa3f3f57 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/StringUtilities.js @@ -0,0 +1,513 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export const escapeCharacters = (inputString, charsToEscape) => { + let foundChar = false; + for (let i = 0; i < charsToEscape.length; ++i) { + if (inputString.indexOf(charsToEscape.charAt(i)) !== -1) { + foundChar = true; + break; + } + } + if (!foundChar) { + return String(inputString); + } + let result = ''; + for (let i = 0; i < inputString.length; ++i) { + if (charsToEscape.indexOf(inputString.charAt(i)) !== -1) { + result += '\\'; + } + result += inputString.charAt(i); + } + return result; +}; +const toHexadecimal = (charCode, padToLength) => { + return charCode.toString(16).toUpperCase().padStart(padToLength, '0'); +}; +// Remember to update the third group in the regexps patternsToEscape and +// patternsToEscapePlusSingleQuote when adding new entries in this map. +const escapedReplacements = new Map([ + ['\b', '\\b'], + ['\f', '\\f'], + ['\n', '\\n'], + ['\r', '\\r'], + ['\t', '\\t'], + ['\v', '\\v'], + ['\'', '\\\''], + ['\\', '\\\\'], + ['<------------2-----------> <---------3--------> <-----4----> <------5-----> <-----6----> <7> +// 1: two or more consecutive uppercase letters. This is useful for identifying acronyms +// 2: lookahead assertion that matches a word boundary +// 3: numeronym: single letter followed by number and another letter +// 4: word starting with an optional uppercase letter +// 5: single digit followed by word to handle '3D' or '2px' (this might be controverial) +// 6: single uppercase letter or number +// 7: a dot character. We extract it into a separate word and remove dashes around it later. +// This is makes more sense conceptually and allows accounting for all possible word variants. +// Making dot a part of a word prevent us from handling acronyms or numeronyms after the word +// correctly without making the RegExp prohibitively complicated. 
+// https://regex101.com/r/FhMVKc/1 +export const toKebabCase = function (input) { + return (input.match?.(WORD)?.map(w => w.toLowerCase()).join('-').replaceAll('-.-', '.') || input); +}; +/* eslint-disable @typescript-eslint/no-explicit-any */ +export function toKebabCaseKeys(settingValue) { + const result = {}; + for (const [key, value] of Object.entries(settingValue)) { + result[toKebabCase(key)] = value; + } + return result; +} +/* eslint-enable @typescript-eslint/no-explicit-any */ +// Replaces the last ocurrence of parameter `search` with parameter `replacement` in `input` +export const replaceLast = function (input, search, replacement) { + const replacementStartIndex = input.lastIndexOf(search); + if (replacementStartIndex === -1) { + return input; + } + return input.slice(0, replacementStartIndex) + input.slice(replacementStartIndex).replace(search, replacement); +}; +export const stringifyWithPrecision = function stringifyWithPrecision(s, precision = 2) { + if (precision === 0) { + return s.toFixed(0); + } + const string = s.toFixed(precision).replace(/\.?0*$/, ''); + return string === '-0' ? '0' : string; +}; +/** + * Somewhat efficiently concatenates 2 base64 encoded strings. + */ +export const concatBase64 = function (lhs, rhs) { + if (lhs.length === 0 || !lhs.endsWith('=')) { + // Empty string or no padding, we can straight-up concatenate. + return lhs + rhs; + } + const lhsLeaveAsIs = lhs.substring(0, lhs.length - 4); + const lhsToDecode = lhs.substring(lhs.length - 4); + return lhsLeaveAsIs + window.btoa(window.atob(lhsToDecode) + window.atob(rhs)); +}; +//# sourceMappingURL=StringUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/Timing.js b/node_modules/@paulirish/trace_engine/core/platform/Timing.js new file mode 100644 index 000000000..405d8c373 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/Timing.js @@ -0,0 +1,13 @@ +// Copyright 2023 The Chromium Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export function secondsToMilliSeconds(x) { + return (x * 1000); +} +export function milliSecondsToSeconds(x) { + return (x / 1000); +} +export function microSecondsToMilliSeconds(x) { + return (x / 1000); +} +//# sourceMappingURL=Timing.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/TypedArrayUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/TypedArrayUtilities.js new file mode 100644 index 000000000..12e8d675e --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/TypedArrayUtilities.js @@ -0,0 +1,111 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +/** + * @returns A BigUint32Array implementation which is based on Array. + * This means that its length automatically expands to include the highest index + * used, and asArrayOrFail will succeed. + */ +export function createExpandableBigUint32Array() { + return new ExpandableBigUint32ArrayImpl(); +} +/** + * @returns A BigUint32Array implementation which is based on Uint32Array. + * If the length is small enough to fit in a single Uint32Array, then + * asUint32ArrayOrFail will succeed. Otherwise, it will throw an exception. + */ +export function createFixedBigUint32Array(length, maxLengthForTesting) { + try { + if (maxLengthForTesting !== undefined && length > maxLengthForTesting) { + // Simulate allocation failure. + throw new RangeError(); + } + return new BasicBigUint32ArrayImpl(length); + } + catch { + // We couldn't allocate a big enough ArrayBuffer. 
+ return new SplitBigUint32ArrayImpl(length, maxLengthForTesting); + } +} +class BasicBigUint32ArrayImpl extends Uint32Array { + getValue(index) { + return this[index]; + } + setValue(index, value) { + this[index] = value; + } + asUint32ArrayOrFail() { + return this; + } + asArrayOrFail() { + throw new Error('Not an array'); + } +} +class SplitBigUint32ArrayImpl { + #data; + #partLength; + length; + constructor(length, maxLengthForTesting) { + this.#data = []; + this.length = length; + let partCount = 1; + while (true) { + partCount *= 2; + this.#partLength = Math.ceil(length / partCount); + try { + if (maxLengthForTesting !== undefined && this.#partLength > maxLengthForTesting) { + // Simulate allocation failure. + throw new RangeError(); + } + for (let i = 0; i < partCount; ++i) { + this.#data[i] = new Uint32Array(this.#partLength); + } + return; + } + catch (e) { + if (this.#partLength < 1e6) { + // The length per part is already small, so continuing to subdivide it + // will probably not help. + throw e; + } + } + } + } + getValue(index) { + if (index >= 0 && index < this.length) { + const partLength = this.#partLength; + return this.#data[Math.floor(index / partLength)][index % partLength]; + } + // On out-of-bounds accesses, match the behavior of Uint32Array: return an + // undefined value that's incorrectly typed as number. + return this.#data[0][-1]; + } + setValue(index, value) { + if (index >= 0 && index < this.length) { + const partLength = this.#partLength; + this.#data[Math.floor(index / partLength)][index % partLength] = value; + } + // Attempting to set a value out of bounds does nothing, like Uint32Array. 
+ } + asUint32ArrayOrFail() { + throw new Error('Not a Uint32Array'); + } + asArrayOrFail() { + throw new Error('Not an array'); + } +} +class ExpandableBigUint32ArrayImpl extends Array { + getValue(index) { + return this[index]; + } + setValue(index, value) { + this[index] = value; + } + asUint32ArrayOrFail() { + throw new Error('Not a Uint32Array'); + } + asArrayOrFail() { + return this; + } +} +//# sourceMappingURL=TypedArrayUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/TypescriptUtilities.js b/node_modules/@paulirish/trace_engine/core/platform/TypescriptUtilities.js new file mode 100644 index 000000000..9d91df502 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/TypescriptUtilities.js @@ -0,0 +1,25 @@ +// Copyright 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +/** + * This is useful to keep TypeScript happy in a test - if you have a value + * that's potentially `null` you can use this function to assert that it isn't, + * and satisfy TypeScript that the value is present. + */ +export function assertNotNullOrUndefined(val, message) { + if (val === null || val === undefined) { + throw new Error(`Expected given value to not be null/undefined but it was: ${val}${message ? `\n${message}` : ''}`); + } +} +export function assertNever(type, message) { + throw new Error(message); +} +/** + * This is useful to check on the type-level that the unhandled cases of + * a switch are exactly `T` (where T is usually a union type of enum values). 
+ * @param caseVariable + */ +export function assertUnhandled(_caseVariable) { + return _caseVariable; +} +//# sourceMappingURL=TypescriptUtilities.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/UIString.js b/node_modules/@paulirish/trace_engine/core/platform/UIString.js new file mode 100644 index 000000000..30c9c3d56 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/UIString.js @@ -0,0 +1,5 @@ +// Copyright 2021 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export const LocalizedEmptyString = ''; +//# sourceMappingURL=UIString.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/UserVisibleError.js b/node_modules/@paulirish/trace_engine/core/platform/UserVisibleError.js new file mode 100644 index 000000000..d4cc51312 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/UserVisibleError.js @@ -0,0 +1,23 @@ +// Copyright 2021 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +/** + * Represents an error that might become visible to the user. Where errors + * might be surfaced to the user (such as by displaying the message to the + * console), this class should be used to enforce that the message is + * localized on the way in. 
+ */ +export class UserVisibleError extends Error { + message; + constructor(message) { + super(message); + this.message = message; + } +} +export function isUserVisibleError(error) { + if (typeof error === 'object' && error !== null) { + return error instanceof UserVisibleError; + } + return false; +} +//# sourceMappingURL=UserVisibleError.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/core/platform/bundle-tsconfig.json new file mode 100644 index 000000000..63648e797 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../front_end/core/platform"},"files":["../../../../../../front_end/core/platform/platform.ts"],"references":[{"path":"./platform-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/core/platform/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..e6c3f65dd --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../front_end/core/platform", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": 
"devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../front_end/core/platform/platform.ts", + "../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./platform-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/platform-tsconfig.json b/node_modules/@paulirish/trace_engine/core/platform/platform-tsconfig.json new file mode 100644 index 000000000..6b969bed2 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/platform-tsconfig.json @@ -0,0 +1,54 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../front_end/core/platform", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "platform-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../front_end/core/platform/ArrayUtilities.ts", + "../../../../../../front_end/core/platform/Brand.ts", + "../../../../../../front_end/core/platform/DOMUtilities.ts", + "../../../../../../front_end/core/platform/DateUtilities.ts", + "../../../../../../front_end/core/platform/DevToolsPath.ts", + "../../../../../../front_end/core/platform/KeyboardUtilities.ts", + 
"../../../../../../front_end/core/platform/MapUtilities.ts", + "../../../../../../front_end/core/platform/MimeType.ts", + "../../../../../../front_end/core/platform/NumberUtilities.ts", + "../../../../../../front_end/core/platform/PromiseUtilities.ts", + "../../../../../../front_end/core/platform/SetUtilities.ts", + "../../../../../../front_end/core/platform/StringUtilities.ts", + "../../../../../../front_end/core/platform/Timing.ts", + "../../../../../../front_end/core/platform/TypedArrayUtilities.ts", + "../../../../../../front_end/core/platform/TypescriptUtilities.ts", + "../../../../../../front_end/core/platform/UIString.ts", + "../../../../../../front_end/core/platform/UserVisibleError.ts", + "../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../node_modules/@types/filesystem/index.d.ts" + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/core/platform/platform.js b/node_modules/@paulirish/trace_engine/core/platform/platform.js new file mode 100644 index 000000000..25157e6cc --- /dev/null +++ b/node_modules/@paulirish/trace_engine/core/platform/platform.js @@ -0,0 +1,54 @@ +/* + * Copyright (C) 2019 Google Inc. All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above + * copyright notice, this list of conditions and the following disclaimer + * in the documentation and/or other materials provided with the + * distribution. + * * Neither the name of Google Inc. 
nor the names of its + * contributors may be used to endorse or promote products derived from + * this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ +import * as ArrayUtilities from './ArrayUtilities.js'; +import * as Brand from './Brand.js'; +import * as DateUtilities from './DateUtilities.js'; +import * as DevToolsPath from './DevToolsPath.js'; +import * as DOMUtilities from './DOMUtilities.js'; +import * as KeyboardUtilities from './KeyboardUtilities.js'; +import * as MapUtilities from './MapUtilities.js'; +import * as MimeType from './MimeType.js'; +import * as NumberUtilities from './NumberUtilities.js'; +import * as PromiseUtilities from './PromiseUtilities.js'; +import * as SetUtilities from './SetUtilities.js'; +import * as StringUtilities from './StringUtilities.js'; +import * as Timing from './Timing.js'; +import * as TypedArrayUtilities from './TypedArrayUtilities.js'; +import * as TypeScriptUtilities from './TypescriptUtilities.js'; +import * as UIString from './UIString.js'; +import * as UserVisibleError from './UserVisibleError.js'; +/* `assertNotNullOrUndefined` also need to be exposed, as TypeScript does + * not allow `asserts` functions to be used 
with qualified access (e.g. + * `Platform.TypeScriptUtilities.assertNotNullOrUndefined` causes a compile + * error). + */ +export { assertNever, assertNotNullOrUndefined, assertUnhandled } from './TypescriptUtilities.js'; +export { ArrayUtilities, Brand, DateUtilities, DevToolsPath, DOMUtilities, KeyboardUtilities, MapUtilities, MimeType, NumberUtilities, PromiseUtilities, SetUtilities, StringUtilities, Timing, TypedArrayUtilities, TypeScriptUtilities, UIString, UserVisibleError, }; +//# sourceMappingURL=platform.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/generated/protocol.js b/node_modules/@paulirish/trace_engine/generated/protocol.js new file mode 100644 index 000000000..db699a35c --- /dev/null +++ b/node_modules/@paulirish/trace_engine/generated/protocol.js @@ -0,0 +1,5 @@ +// Copyright (c) 2020 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export {}; +//# sourceMappingURL=protocol.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/cpu_profile/CPUProfileDataModel.js b/node_modules/@paulirish/trace_engine/models/cpu_profile/CPUProfileDataModel.js new file mode 100644 index 000000000..5dae558a0 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/cpu_profile/CPUProfileDataModel.js @@ -0,0 +1,508 @@ +// Copyright 2014 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../core/platform/platform.js'; +import { ProfileNode, ProfileTreeModel } from './ProfileTreeModel.js'; +export class CPUProfileNode extends ProfileNode { + id; + self; + // Position ticks are available in profile nodes coming from CDP + // profiles and not in those coming from tracing. 
They are used to + // calculate the line level execution time shown in the Sources panel + // after recording a profile. For trace CPU profiles we use the + // `lines` array instead. + positionTicks; + deoptReason; + constructor(node, samplingInterval /* milliseconds */) { + const callFrame = node.callFrame || { + // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration + // @ts-expect-error + functionName: node['functionName'], + // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration + // @ts-expect-error + scriptId: node['scriptId'], + // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration + // @ts-expect-error + url: node['url'], + // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration + // @ts-expect-error + lineNumber: node['lineNumber'] - 1, + // TODO(crbug.com/1172300) Ignored during the jsdoc to ts migration + // @ts-expect-error + columnNumber: node['columnNumber'] - 1, + }; + super(callFrame); + this.id = node.id; + this.self = (node.hitCount || 0) * samplingInterval; + this.positionTicks = node.positionTicks; + // Compatibility: legacy backends could provide "no reason" for optimized functions. + this.deoptReason = node.deoptReason && node.deoptReason !== 'no reason' ? node.deoptReason : null; + } +} +export class CPUProfileDataModel extends ProfileTreeModel { + profileStartTime; + profileEndTime; + timestamps; + samples; + lines; + totalHitCount; + profileHead; + /** + * A cache for the nodes we have parsed. + * Note: "Parsed" nodes are different from the "Protocol" nodes, the + * latter being the raw data we receive from the backend. + */ + #idToParsedNode; + gcNode; + programNode; + idleNode; + #stackStartTimes; + #stackChildrenDuration; + constructor(profile) { + super(); + // @ts-ignore Legacy types + const isLegacyFormat = Boolean(profile['head']); + if (isLegacyFormat) { + // Legacy format contains raw timestamps and start/stop times are in seconds. 
+ this.profileStartTime = profile.startTime * 1000; + this.profileEndTime = profile.endTime * 1000; + // @ts-ignore Legacy types + this.timestamps = profile.timestamps; + this.compatibilityConversionHeadToNodes(profile); + } + else { + // Current format encodes timestamps as deltas. Start/stop times are in microseconds. + this.profileStartTime = profile.startTime / 1000; + this.profileEndTime = profile.endTime / 1000; + this.timestamps = this.convertTimeDeltas(profile); + } + this.samples = profile.samples; + // Lines are available only in profiles coming from tracing. + // Elements in the lines array have a 1 to 1 correspondance with + // samples, by array position. They can be 1 or 0 and indicate if + // there is line data for a given sample, i.e. if a given sample + // needs to be included to calculate the line level execution time + // data, which we show in the sources panel after recording a + // profile. + this.lines = profile.lines; + this.totalHitCount = 0; + this.profileHead = this.translateProfileTree(profile.nodes); + this.initialize(this.profileHead); + this.extractMetaNodes(); + if (this.samples?.length) { + this.sortSamples(); + this.normalizeTimestamps(); + this.fixMissingSamples(); + } + } + compatibilityConversionHeadToNodes(profile) { + // @ts-ignore Legacy types + if (!profile.head || profile.nodes) { + return; + } + const nodes = []; + // @ts-ignore Legacy types + convertNodesTree(profile.head); + profile.nodes = nodes; + // @ts-ignore Legacy types + delete profile.head; + function convertNodesTree(node) { + nodes.push(node); + // @ts-ignore Legacy types + node.children = node.children.map(convertNodesTree); + return node.id; + } + } + /** + * Calculate timestamps using timeDeltas. Some CPU profile formats, + * like the ones contained in traces have timeDeltas instead of + * timestamps. 
+ */ + convertTimeDeltas(profile) { + if (!profile.timeDeltas) { + return []; + } + let lastTimeMicroSec = profile.startTime; + const timestamps = new Array(profile.timeDeltas.length); + for (let i = 0; i < profile.timeDeltas.length; ++i) { + lastTimeMicroSec += profile.timeDeltas[i]; + timestamps[i] = lastTimeMicroSec; + } + return timestamps; + } + /** + * Creates a Tree of CPUProfileNodes using the Protocol.Profiler.ProfileNodes. + * As the tree is built, samples of native code (prefixed with "native ") are + * filtered out. Samples of filtered nodes are replaced with the parent of the + * node being filtered. + * + * This function supports legacy and new definitions of the CDP Profiler.Profile + * type. + */ + translateProfileTree(nodes) { + function buildChildrenFromParents(nodes) { + if (nodes[0].children) { + return; + } + nodes[0].children = []; + for (let i = 1; i < nodes.length; ++i) { + const node = nodes[i]; + // @ts-ignore Legacy types + const parentNode = protocolNodeById.get(node.parent); + if (!parentNode) { + continue; + } + if (parentNode.children) { + parentNode.children.push(node.id); + } + else { + parentNode.children = [node.id]; + } + } + } + /** + * Calculate how many times each node was sampled in the profile, if + * not available in the profile data. + */ + function buildHitCountFromSamples(nodes, samples) { + // If hit count is available, this profile has the new format, so + // no need to continue.` + if (typeof (nodes[0].hitCount) === 'number') { + return; + } + if (!samples) { + throw new Error('Error: Neither hitCount nor samples are present in profile.'); + } + for (let i = 0; i < nodes.length; ++i) { + nodes[i].hitCount = 0; + } + for (let i = 0; i < samples.length; ++i) { + const node = protocolNodeById.get(samples[i]); + if (!node || node.hitCount === undefined) { + continue; + } + node.hitCount++; + } + } + // A cache for the raw nodes received from the traces / CDP. 
+ const protocolNodeById = new Map(); + for (let i = 0; i < nodes.length; ++i) { + const node = nodes[i]; + protocolNodeById.set(node.id, node); + } + buildHitCountFromSamples(nodes, this.samples); + buildChildrenFromParents(nodes); + this.totalHitCount = nodes.reduce((acc, node) => acc + (node.hitCount || 0), 0); + const sampleTime = (this.profileEndTime - this.profileStartTime) / this.totalHitCount; + const root = nodes[0]; + // If a node is filtered out, its samples are replaced with its parent, + // so we keep track of the which id to use in the samples data. + const idToUseForRemovedNode = new Map([[root.id, root.id]]); + this.#idToParsedNode = new Map(); + const resultRoot = new CPUProfileNode(root, sampleTime); + this.#idToParsedNode.set(root.id, resultRoot); + if (!root.children) { + throw new Error('Missing children for root'); + } + const parentNodeStack = root.children.map(() => resultRoot); + const sourceNodeStack = root.children.map(id => protocolNodeById.get(id)); + while (sourceNodeStack.length) { + let parentNode = parentNodeStack.pop(); + const sourceNode = sourceNodeStack.pop(); + if (!sourceNode || !parentNode) { + continue; + } + if (!sourceNode.children) { + sourceNode.children = []; + } + const targetNode = new CPUProfileNode(sourceNode, sampleTime); + parentNode.children.push(targetNode); + parentNode = targetNode; + idToUseForRemovedNode.set(sourceNode.id, parentNode.id); + parentNodeStack.push.apply(parentNodeStack, sourceNode.children.map(() => parentNode)); + sourceNodeStack.push.apply(sourceNodeStack, sourceNode.children.map(id => protocolNodeById.get(id))); + this.#idToParsedNode.set(sourceNode.id, targetNode); + } + if (this.samples) { + this.samples = this.samples.map(id => idToUseForRemovedNode.get(id)); + } + return resultRoot; + } + /** + * Sorts the samples array using the timestamps array (there is a one + * to one matching by index between the two). 
+ */ + sortSamples() { + if (!this.timestamps || !this.samples) { + return; + } + const timestamps = this.timestamps; + const samples = this.samples; + const orderedIndices = timestamps.map((_x, index) => index); + orderedIndices.sort((a, b) => timestamps[a] - timestamps[b]); + this.timestamps = []; + this.samples = []; + for (let i = 0; i < orderedIndices.length; i++) { + const orderedIndex = orderedIndices[i]; + this.timestamps.push(timestamps[orderedIndex]); + this.samples.push(samples[orderedIndex]); + } + } + /** + * Fills in timestamps and/or time deltas from legacy profiles where + * they could be missing. + */ + normalizeTimestamps() { + if (!this.samples) { + return; + } + let timestamps = this.timestamps; + if (!timestamps) { + // Support loading CPU profiles that are missing timestamps and + // timedeltas + const profileStartTime = this.profileStartTime; + const interval = (this.profileEndTime - profileStartTime) / this.samples.length; + // Add an extra timestamp used to calculate the last sample duration. + timestamps = new Array(this.samples.length + 1); + for (let i = 0; i < timestamps.length; ++i) { + timestamps[i] = profileStartTime + i * interval; + } + this.timestamps = timestamps; + return; + } + // Convert samples from micro to milliseconds + for (let i = 0; i < timestamps.length; ++i) { + timestamps[i] /= 1000; + } + if (this.samples.length === timestamps.length) { + // Add an extra timestamp used to calculate the last sample duration. + const lastTimestamp = timestamps.at(-1) || 0; + const averageIntervalTime = (lastTimestamp - timestamps[0]) / (timestamps.length - 1); + this.timestamps.push(lastTimestamp + averageIntervalTime); + } + this.profileStartTime = timestamps.at(0) || this.profileStartTime; + this.profileEndTime = timestamps.at(-1) || this.profileEndTime; + } + /** + * Some nodes do not refer to JS samples but to V8 system tasks, AKA + * "meta" nodes. This function extracts those nodes from the profile. 
+ */ + extractMetaNodes() { + const topLevelNodes = this.profileHead.children; + for (let i = 0; i < topLevelNodes.length && !(this.gcNode && this.programNode && this.idleNode); i++) { + const node = topLevelNodes[i]; + if (node.functionName === '(garbage collector)') { + this.gcNode = node; + } + else if (node.functionName === '(program)') { + this.programNode = node; + } + else if (node.functionName === '(idle)') { + this.idleNode = node; + } + } + } + fixMissingSamples() { + // Sometimes the V8 sampler is not able to parse the JS stack and returns + // a (program) sample instead. The issue leads to call frames being split + // apart when they shouldn't. + // Here's a workaround for that. When there's a single (program) sample + // between two call stacks sharing the same bottom node, it is replaced + // with the preceeding sample. + const samples = this.samples; + if (!samples) { + return; + } + const samplesCount = samples.length; + if (!this.programNode || samplesCount < 3) { + return; + } + const idToNode = this.#idToParsedNode; + const programNodeId = this.programNode.id; + const gcNodeId = this.gcNode ? this.gcNode.id : -1; + const idleNodeId = this.idleNode ? 
this.idleNode.id : -1; + let prevNodeId = samples[0]; + let nodeId = samples[1]; + for (let sampleIndex = 1; sampleIndex < samplesCount - 1; sampleIndex++) { + const nextNodeId = samples[sampleIndex + 1]; + const prevNode = idToNode.get(prevNodeId); + const nextNode = idToNode.get(nextNodeId); + if (prevNodeId === undefined || nextNodeId === undefined || !prevNode || !nextNode) { + console.error(`Unexpectedly found undefined nodes: ${prevNodeId} ${nextNodeId}`); + continue; + } + if (nodeId === programNodeId && !isSystemNode(prevNodeId) && !isSystemNode(nextNodeId) && + bottomNode(prevNode) === bottomNode(nextNode)) { + samples[sampleIndex] = prevNodeId; + } + prevNodeId = nodeId; + nodeId = nextNodeId; + } + function bottomNode(node) { + while (node.parent && node.parent.parent) { + node = node.parent; + } + return node; + } + function isSystemNode(nodeId) { + return nodeId === programNodeId || nodeId === gcNodeId || nodeId === idleNodeId; + } + } + /** + * Traverses the call tree derived from the samples calling back when a call is opened + * and when it's closed + */ + forEachFrame(openFrameCallback, closeFrameCallback, startTime, stopTime) { + if (!this.profileHead || !this.samples) { + return; + } + startTime = startTime || 0; + stopTime = stopTime || Infinity; + const samples = this.samples; + const timestamps = this.timestamps; + const idToNode = this.#idToParsedNode; + const gcNode = this.gcNode; + const samplesCount = samples.length; + const startIndex = Platform.ArrayUtilities.lowerBound(timestamps, startTime, Platform.ArrayUtilities.DEFAULT_COMPARATOR); + let stackTop = 0; + const stackNodes = []; + let prevId = this.profileHead.id; + let sampleTime; + let gcParentNode = null; + // Extra slots for gc being put on top, + // and one at the bottom to allow safe stackTop-1 access. 
+ const stackDepth = this.maxDepth + 3; + if (!this.#stackStartTimes) { + this.#stackStartTimes = new Array(stackDepth); + } + const stackStartTimes = this.#stackStartTimes; + if (!this.#stackChildrenDuration) { + this.#stackChildrenDuration = new Array(stackDepth); + } + const stackChildrenDuration = this.#stackChildrenDuration; + let node; + let sampleIndex; + for (sampleIndex = startIndex; sampleIndex < samplesCount; sampleIndex++) { + sampleTime = timestamps[sampleIndex]; + if (sampleTime >= stopTime) { + break; + } + const id = samples[sampleIndex]; + if (id === prevId) { + continue; + } + node = idToNode.get(id); + let prevNode = idToNode.get(prevId) || null; + if (!prevNode) { + continue; + } + if (gcNode && node === gcNode) { + // GC samples have no stack, so we just put GC node on top of the last recorded sample. + gcParentNode = prevNode; + openFrameCallback(gcParentNode.depth + 1, gcNode, sampleIndex, sampleTime); + stackStartTimes[++stackTop] = sampleTime; + stackChildrenDuration[stackTop] = 0; + prevId = id; + continue; + } + if (gcNode && prevNode === gcNode && gcParentNode) { + // end of GC frame + const start = stackStartTimes[stackTop]; + const duration = sampleTime - start; + stackChildrenDuration[stackTop - 1] += duration; + closeFrameCallback(gcParentNode.depth + 1, gcNode, sampleIndex, start, duration, duration - stackChildrenDuration[stackTop]); + --stackTop; + prevNode = gcParentNode; + prevId = prevNode.id; + gcParentNode = null; + } + // If the depth of this node is greater than the depth of the + // previous one, new calls happened in between and we need to open + // them, so track all of them in stackNodes. + while (node && node.depth > prevNode.depth) { + stackNodes.push(node); + node = node.parent; + } + // If `prevNode` differs from `node`, the current sample was taken + // after a change in the call stack, meaning that frames in the + // path of `prevNode` that differ from those in the path of `node` + // can be closed. 
So go down to the lowest common ancestor and + // close current intervals. + // + // For example: + // + // prevNode node + // | | + // v v + // [---D--] + // [---C--][--E--] + // [------B------] <- LCA + // [------A------] + // + // Because a sample was taken with A, B and E in the stack, it + // means C and D finished and we can close them. + while (prevNode && prevNode !== node) { + const start = stackStartTimes[stackTop]; + const duration = sampleTime - start; + stackChildrenDuration[stackTop - 1] += duration; + closeFrameCallback(prevNode.depth, prevNode, sampleIndex, start, duration, duration - stackChildrenDuration[stackTop]); + --stackTop; + // Track calls to open after previous calls were closed + // In the example above, this would add E to the tracking stack. + if (node && node.depth === prevNode.depth) { + stackNodes.push(node); + node = node.parent; + } + prevNode = prevNode.parent; + } + // Go up the nodes stack and open new intervals. + while (stackNodes.length) { + const currentNode = stackNodes.pop(); + if (!currentNode) { + break; + } + node = currentNode; + openFrameCallback(currentNode.depth, currentNode, sampleIndex, sampleTime); + stackStartTimes[++stackTop] = sampleTime; + stackChildrenDuration[stackTop] = 0; + } + prevId = id; + } + // Close remaining intervals. 
+ sampleTime = timestamps[sampleIndex] || this.profileEndTime; + if (node && gcParentNode && idToNode.get(prevId) === gcNode) { + const start = stackStartTimes[stackTop]; + const duration = sampleTime - start; + stackChildrenDuration[stackTop - 1] += duration; + closeFrameCallback(gcParentNode.depth + 1, node, sampleIndex, start, duration, duration - stackChildrenDuration[stackTop]); + --stackTop; + prevId = gcParentNode.id; + } + for (let node = idToNode.get(prevId); node && node.parent; node = node.parent) { + const start = stackStartTimes[stackTop]; + const duration = sampleTime - start; + stackChildrenDuration[stackTop - 1] += duration; + closeFrameCallback(node.depth, node, sampleIndex, start, duration, duration - stackChildrenDuration[stackTop]); + --stackTop; + } + } + /** + * Returns the node that corresponds to a given index of a sample. + */ + nodeByIndex(index) { + return this.samples && this.#idToParsedNode.get(this.samples[index]) || null; + } + /** + * Returns the node that corresponds to a given node id. + */ + nodeById(nodeId) { + return this.#idToParsedNode.get(nodeId) || null; + } + nodes() { + if (!this.#idToParsedNode) { + return null; + } + return [...this.#idToParsedNode.values()]; + } +} +//# sourceMappingURL=CPUProfileDataModel.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/cpu_profile/ProfileTreeModel.js b/node_modules/@paulirish/trace_engine/models/cpu_profile/ProfileTreeModel.js new file mode 100644 index 000000000..5647c9ade --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/cpu_profile/ProfileTreeModel.js @@ -0,0 +1,95 @@ +// Copyright 2016 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export class ProfileNode { + callFrame; + callUID; + self; + total; + id; + parent; + children; + functionName; + depth; + deoptReason; + constructor(callFrame) { + this.callFrame = callFrame; + this.callUID = `${callFrame.functionName}@${callFrame.scriptId}:${callFrame.lineNumber}:${callFrame.columnNumber}`; + this.self = 0; + this.total = 0; + this.id = 0; + this.functionName = callFrame.functionName; + this.parent = null; + this.children = []; + } + get scriptId() { + return String(this.callFrame.scriptId); + } + get url() { + return this.callFrame.url; + } + get lineNumber() { + return this.callFrame.lineNumber; + } + get columnNumber() { + return this.callFrame.columnNumber; + } + setFunctionName(name) { + if (name === null) { + return; + } + this.functionName = name; + } +} +export class ProfileTreeModel { + root; + total; + maxDepth; + constructor() { + } + initialize(root) { + this.root = root; + this.assignDepthsAndParents(); + this.total = this.calculateTotals(this.root); + } + assignDepthsAndParents() { + const root = this.root; + // TODO(crbug.com/1354548): start depth from 0 once profiler + // panel dependencies are gone. 
+ root.depth = -1; + root.parent = null; + this.maxDepth = 0; + const nodesToTraverse = [root]; + while (nodesToTraverse.length) { + const parent = nodesToTraverse.pop(); + const depth = parent.depth + 1; + if (depth > this.maxDepth) { + this.maxDepth = depth; + } + const children = parent.children; + for (const child of children) { + child.depth = depth; + child.parent = parent; + nodesToTraverse.push(child); + } + } + } + calculateTotals(root) { + const nodesToTraverse = [root]; + const dfsList = []; + while (nodesToTraverse.length) { + const node = nodesToTraverse.pop(); + node.total = node.self; + dfsList.push(node); + nodesToTraverse.push(...node.children); + } + while (dfsList.length > 1) { + const node = dfsList.pop(); + if (node.parent) { + node.parent.total += node.total; + } + } + return root.total; + } +} +//# sourceMappingURL=ProfileTreeModel.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/cpu_profile/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/cpu_profile/bundle-tsconfig.json new file mode 100644 index 000000000..b9aa91e3b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/cpu_profile/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../front_end/models/cpu_profile"},"files":["../../../../../../front_end/models/cpu_profile/cpu_profile.ts"],"references":[{"path":"./cpu_profile-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/cpu_profile/cpu_profile-tsconfig.json b/node_modules/@paulirish/trace_engine/models/cpu_profile/cpu_profile-tsconfig.json new file mode 100644 index 000000000..85a81cfd2 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/cpu_profile/cpu_profile-tsconfig.json @@ -0,0 +1,50 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, 
+ "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../front_end/models/cpu_profile", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "cpu_profile-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../front_end/models/cpu_profile/CPUProfileDataModel.ts", + "../../../../../../front_end/models/cpu_profile/ProfileTreeModel.ts", + "../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../core/common/bundle-tsconfig.json" + }, + { + "path": "../../core/platform/bundle-tsconfig.json" + }, + { + "path": "../../generated/generated-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/cpu_profile/cpu_profile.js b/node_modules/@paulirish/trace_engine/models/cpu_profile/cpu_profile.js new file mode 100644 index 000000000..7a2566374 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/cpu_profile/cpu_profile.js @@ -0,0 +1,7 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as CPUProfileDataModel from './CPUProfileDataModel.js'; +import * as ProfileTreeModel from './ProfileTreeModel.js'; +export { CPUProfileDataModel, ProfileTreeModel, }; +//# sourceMappingURL=cpu_profile.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/cpu_profile/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/cpu_profile/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..00d1e74d3 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/cpu_profile/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../front_end/models/cpu_profile", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../front_end/models/cpu_profile/cpu_profile.ts", + "../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./cpu_profile-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/EntriesFilter.js 
b/node_modules/@paulirish/trace_engine/models/trace/EntriesFilter.js new file mode 100644 index 000000000..2382aa13b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/EntriesFilter.js @@ -0,0 +1,294 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../core/platform/platform.js'; +import * as Helpers from './helpers/helpers.js'; +import * as Types from './types/types.js'; +/** + * This class can take in a thread that has been generated by the + * RendererHandler and apply certain actions to it in order to modify what is + * shown to the user. These actions can be automatically applied by DevTools or + * applied by the user. + * + * Once actions are applied, the invisibleEntries() method will return the + * entries that are invisible, and this is the list of entries that should be + * removed before rendering the resulting thread on the timeline. + **/ +export class EntriesFilter { + // Maps from an individual TraceEvent entry to its representation as a + // RendererEntryNode. We need this so we can then parse the tree structure + // generated by the RendererHandler. + #entryToNode; + // Track the set of invisible entries. + #invisibleEntries = []; + // List of entries whose children are modified. This list is used to + // keep track of entries that should be identified in the UI as modified. + #modifiedVisibleEntries = []; + // Cache for descendants of entry that have already been gathered. The descendants + // will never change so we can avoid running the potentially expensive search again. + #entryToDescendantsMap = new Map(); + constructor(entryToNodeMap) { + this.#entryToNode = entryToNodeMap; + } + /** + * Checks which actions can be applied on an entry. This allows us to only show possible actions in the Context Menu. 
+ * For example, if an entry has no children, COLLAPSE_FUNCTION will not change the FlameChart, therefore there is no need to show this action as an option. + **/ + findPossibleActions(entry) { + const entryNode = this.#entryToNode.get(entry); + if (!entryNode) { + // Invalid node was given, return no possible actions. + return { + ["MERGE_FUNCTION" /* FilterAction.MERGE_FUNCTION */]: false, + ["COLLAPSE_FUNCTION" /* FilterAction.COLLAPSE_FUNCTION */]: false, + ["COLLAPSE_REPEATING_DESCENDANTS" /* FilterAction.COLLAPSE_REPEATING_DESCENDANTS */]: false, + ["RESET_CHILDREN" /* FilterAction.RESET_CHILDREN */]: false, + ["UNDO_ALL_ACTIONS" /* FilterAction.UNDO_ALL_ACTIONS */]: false, + }; + } + const entryParent = entryNode.parent; + const allVisibleDescendants = this.#findAllDescendantsOfNode(entryNode).filter(descendant => !this.#invisibleEntries.includes(descendant)); + const allVisibleRepeatingDescendants = this.#findAllRepeatingDescendantsOfNext(entryNode).filter(descendant => !this.#invisibleEntries.includes(descendant)); + const allInVisibleDescendants = this.#findAllDescendantsOfNode(entryNode).filter(descendant => this.#invisibleEntries.includes(descendant)); + // If there are children to hide, indicate action as possible + const possibleActions = { + ["MERGE_FUNCTION" /* FilterAction.MERGE_FUNCTION */]: entryParent !== null, + ["COLLAPSE_FUNCTION" /* FilterAction.COLLAPSE_FUNCTION */]: allVisibleDescendants.length > 0, + ["COLLAPSE_REPEATING_DESCENDANTS" /* FilterAction.COLLAPSE_REPEATING_DESCENDANTS */]: allVisibleRepeatingDescendants.length > 0, + ["RESET_CHILDREN" /* FilterAction.RESET_CHILDREN */]: allInVisibleDescendants.length > 0, + ["UNDO_ALL_ACTIONS" /* FilterAction.UNDO_ALL_ACTIONS */]: this.#invisibleEntries.length > 0, + }; + return possibleActions; + } + /** + * Returns the amount of entry descendants that belong to the hidden entries array. 
+ * **/ + findHiddenDescendantsAmount(entry) { + const entryNode = this.#entryToNode.get(entry); + if (!entryNode) { + return 0; + } + const allDescendants = this.#findAllDescendantsOfNode(entryNode); + return allDescendants.filter(descendant => this.invisibleEntries().includes(descendant)).length; + } + /** + * Returns the set of entries that are invisible given the set of applied actions. + **/ + invisibleEntries() { + return this.#invisibleEntries; + } + /** + * Sets invisible and modified entries. Called when a trace with annotations is loaded and some entries are set as hidden and modified. + * Both arrays are set together because if there is one, the other must be present too. + **/ + setInvisibleAndModifiedEntries(invisibleEntries, modifiedEntries) { + this.#invisibleEntries.push(...invisibleEntries); + this.#modifiedVisibleEntries.push(...modifiedEntries); + } + inEntryInvisible(entry) { + return this.#invisibleEntries.includes(entry); + } + /** + * Returns the array of entries that have a sign indicating that entries below are hidden. + **/ + modifiedEntries() { + return this.#modifiedVisibleEntries; + } + /** + * Applies an action to hide entries or removes entries + * from hidden entries array depending on the action. + **/ + applyFilterAction(action) { + // We apply new user action to the set of all entries, and mark + // any that should be hidden by adding them to this set. + // Another approach would be to use splice() to remove items from the + // array, but doing this would be a mutation of the arry for every hidden + // event. Instead, we add entries to this set and return it as an array at the end. + const entriesToHide = new Set(); + switch (action.type) { + case "MERGE_FUNCTION" /* FilterAction.MERGE_FUNCTION */: { + // The entry that was clicked on is merged into its parent. All its + // children remain visible, so we just have to hide the entry that was + // selected. 
+ entriesToHide.add(action.entry); + // If parent node exists, add it to modifiedVisibleEntries, so it would be possible to uncollapse its' children. + const actionNode = this.#entryToNode.get(action.entry) || null; + const parentNode = actionNode && this.#findNextVisibleParent(actionNode); + if (parentNode) { + this.#addModifiedEntry(parentNode.entry); + } + break; + } + case "COLLAPSE_FUNCTION" /* FilterAction.COLLAPSE_FUNCTION */: { + // The entry itself remains visible, but all of its descendants are hidden. + const entryNode = this.#entryToNode.get(action.entry); + if (!entryNode) { + // Invalid node was given, just ignore and move on. + break; + } + const allDescendants = this.#findAllDescendantsOfNode(entryNode); + allDescendants.forEach(descendant => entriesToHide.add(descendant)); + this.#addModifiedEntry(action.entry); + break; + } + case "COLLAPSE_REPEATING_DESCENDANTS" /* FilterAction.COLLAPSE_REPEATING_DESCENDANTS */: { + const entryNode = this.#entryToNode.get(action.entry); + if (!entryNode) { + // Invalid node was given, just ignore and move on. + break; + } + const allRepeatingDescendants = this.#findAllRepeatingDescendantsOfNext(entryNode); + allRepeatingDescendants.forEach(descendant => entriesToHide.add(descendant)); + if (entriesToHide.size > 0) { + this.#addModifiedEntry(action.entry); + } + break; + } + case "UNDO_ALL_ACTIONS" /* FilterAction.UNDO_ALL_ACTIONS */: { + this.#invisibleEntries = []; + this.#modifiedVisibleEntries = []; + break; + } + case "RESET_CHILDREN" /* FilterAction.RESET_CHILDREN */: { + this.#makeEntryChildrenVisible(action.entry); + break; + } + default: + Platform.assertNever(action.type, `Unknown EntriesFilter action: ${action.type}`); + } + this.#invisibleEntries.push(...entriesToHide); + return this.#invisibleEntries; + } + /** + * Add an entry to the array of entries that have a sign indicating that entries below are hidden. + * Also, remove all of the child entries of the new modified entry from the modified array. 
Do that because + * to draw the initiator from the closest visible entry, we need to get the closest entry that is + * marked as modified and we do not want to get some that are hidden. + */ + #addModifiedEntry(entry) { + this.#modifiedVisibleEntries.push(entry); + const entryNode = this.#entryToNode.get(entry); + if (!entryNode) { + // Invalid node was given, just ignore and move on. + return; + } + const allDescendants = this.#findAllDescendantsOfNode(entryNode); + if (allDescendants.length > 0) { + this.#modifiedVisibleEntries = this.#modifiedVisibleEntries.filter(entry => { + return !allDescendants.includes(entry); + }); + } + } + // The direct parent might be hidden by other actions, therefore we look for the next visible parent. + #findNextVisibleParent(node) { + let parent = node.parent; + while (parent && this.#invisibleEntries.includes(parent.entry)) { + parent = parent.parent; + } + return parent; + } + #findAllDescendantsOfNode(root) { + const cachedDescendants = this.#entryToDescendantsMap.get(root); + if (cachedDescendants) { + return cachedDescendants; + } + const descendants = []; + // Walk through all the descendants, starting at the root node. + const children = [...root.children]; + while (children.length > 0) { + const childNode = children.shift(); + if (childNode) { + descendants.push(childNode.entry); + const childNodeCachedDescendants = this.#entryToDescendantsMap.get(childNode); + // If the descendants of a child are cached, get them from the cache instead of iterating through them again + if (childNodeCachedDescendants) { + descendants.push(...childNodeCachedDescendants); + } + else { + children.push(...childNode.children); + } + } + } + this.#entryToDescendantsMap.set(root, descendants); + return descendants; + } + #findAllRepeatingDescendantsOfNext(root) { + // Walk through all the ancestors, starting at the root node. 
+ const children = [...root.children]; + const repeatingNodes = []; + const rootIsProfileCall = Types.TraceEvents.isProfileCall(root.entry); + while (children.length > 0) { + const childNode = children.shift(); + if (childNode) { + const childIsProfileCall = Types.TraceEvents.isProfileCall(childNode.entry); + if ( /* Handle SyntheticProfileCalls */rootIsProfileCall && childIsProfileCall) { + const rootNodeEntry = root.entry; + const childNodeEntry = childNode.entry; + if (Helpers.SamplesIntegrator.SamplesIntegrator.framesAreEqual(rootNodeEntry.callFrame, childNodeEntry.callFrame)) { + repeatingNodes.push(childNode.entry); + } + } /* Handle SyntheticRendererEvents */ + else if (!rootIsProfileCall && !childIsProfileCall) { + if (root.entry.name === childNode.entry.name) { + repeatingNodes.push(childNode.entry); + } + } + children.push(...childNode.children); + } + } + return repeatingNodes; + } + /** + * If an entry was selected from a link instead of clicking on it, + * it might be in the invisible entries array. + * If it is, reveal it by resetting clidren the closest modified entry, + */ + revealEntry(entry) { + const entryNode = this.#entryToNode.get(entry); + if (!entryNode) { + // Invalid node was given, just ignore and move on. + return; + } + let closestModifiedParent = entryNode; + while (closestModifiedParent.parent && !this.#modifiedVisibleEntries.includes(closestModifiedParent.entry)) { + closestModifiedParent = closestModifiedParent.parent; + } + this.#makeEntryChildrenVisible(closestModifiedParent.entry); + } + /** + * Removes all of the entry children from the + * invisible entries array to make them visible. + **/ + #makeEntryChildrenVisible(entry) { + const entryNode = this.#entryToNode.get(entry); + if (!entryNode) { + // Invalid node was given, just ignore and move on. + return; + } + const descendants = this.#findAllDescendantsOfNode(entryNode); + /** + * Filter out all descendant of the node + * from the invisible entries list. 
+ **/ + this.#invisibleEntries = this.#invisibleEntries.filter(entry => { + if (descendants.includes(entry)) { + return false; + } + return true; + }); + /** + * Filter out all descentants and entry from modified entries + * list to not show that some entries below those are hidden. + **/ + this.#modifiedVisibleEntries = this.#modifiedVisibleEntries.filter(iterEntry => { + if (descendants.includes(iterEntry) || iterEntry === entry) { + return false; + } + return true; + }); + } + isEntryModified(event) { + return this.#modifiedVisibleEntries.includes(event); + } +} +//# sourceMappingURL=EntriesFilter.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/LegacyTracingModel.js b/node_modules/@paulirish/trace_engine/models/trace/LegacyTracingModel.js new file mode 100644 index 000000000..cb0ff5c3b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/LegacyTracingModel.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@paulirish/trace_engine/models/trace/ModelImpl.js b/node_modules/@paulirish/trace_engine/models/trace/ModelImpl.js new file mode 100644 index 000000000..78f3e7284 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/ModelImpl.js @@ -0,0 +1,179 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../core/platform/platform.js'; +import * as Handlers from './handlers/handlers.js'; +import * as Helpers from './helpers/helpers.js'; +import { TraceParseProgressEvent, TraceProcessor } from './Processor.js'; +import * as Types from './types/types.js'; +/** + * The new trace engine model we are migrating to. The Model is responsible for + * parsing arrays of raw trace events and storing the resulting data. It can + * store multiple traces at once, and can return the data for any of them. 
+ * Currently as we migrate from the old engine to this, we are turning on the + * model handlers incrementally as we need the data, to save performance costs + * of running handlers that we do not use. Therefore, when the model is + * constructed we pass through a set of handlers that should be used. Once we + * have migrated all tracks in the Performance Panel to this model, we can + * remove this ability to run a subset of handlers, as we will need all handlers + * to be used at that point. For tests, if you want to construct a model with + * all handlers, you can use the static `Model.createWithAllHandlers` method. + **/ +export class Model extends EventTarget { + #traces = []; + #nextNumberByDomain = new Map(); + #recordingsAvailable = []; + #lastRecordingIndex = 0; + #processor; + #config = Types.Configuration.defaults(); + static createWithAllHandlers(config) { + return new Model(Handlers.ModelHandlers, config); + } + constructor(handlers, config) { + super(); + if (config) { + this.#config = config; + } + this.#processor = new TraceProcessor(handlers, this.#config); + } + /** + * Parses an array of trace events into a structured object containing all the + * information parsed by the trace handlers. + * You can `await` this function to pause execution until parsing is complete, + * or instead rely on the `ModuleUpdateEvent` that is dispatched when the + * parsing is finished. + * + * Once parsed, you then have to call the `traceParsedData` method, providing an + * index of the trace you want to have the data for. This is because any model + * can store a number of traces. Each trace is given an index, which starts at 0 + * and increments by one as a new trace is parsed. + * + * @example + * // Awaiting the parse method() to block until parsing complete + * await this.traceModel.parse(events); + * const data = this.traceModel.traceParsedData(0) + * + * @example + * // Using an event listener to be notified when tracing is complete. 
+ * this.traceModel.addEventListener(Trace.ModelUpdateEvent.eventName, (event) => { + * if(event.data.data === 'done') { + * // trace complete + * const data = this.traceModel.traceParsedData(0); + * } + * }); + * void this.traceModel.parse(events); + **/ + async parse(traceEvents, config) { + const metadata = config?.metadata || {}; + const isFreshRecording = config?.isFreshRecording || false; + // During parsing, periodically update any listeners on each processors' + // progress (if they have any updates). + const onTraceUpdate = (event) => { + const { data } = event; + this.dispatchEvent(new ModelUpdateEvent({ type: "PROGRESS_UPDATE" /* ModelUpdateType.PROGRESS_UPDATE */, data: data })); + }; + this.#processor.addEventListener(TraceParseProgressEvent.eventName, onTraceUpdate); + // Create a parsed trace file. It will be populated with data from the processor. + const file = { + traceEvents, + metadata, + traceParsedData: null, + traceInsights: null, + }; + try { + // Wait for all outstanding promises before finishing the async execution, + // but perform all tasks in parallel. + await this.#processor.parse(traceEvents, isFreshRecording); + this.#storeParsedFileData(file, this.#processor.traceParsedData, this.#processor.insights); + // We only push the file onto this.#traces here once we know it's valid + // and there's been no errors in the parsing. + this.#traces.push(file); + } + catch (e) { + throw e; + } + finally { + // All processors have finished parsing, no more updates are expected. + this.#processor.removeEventListener(TraceParseProgressEvent.eventName, onTraceUpdate); + // Finally, update any listeners that all processors are 'done'. 
+ this.dispatchEvent(new ModelUpdateEvent({ type: "COMPLETE" /* ModelUpdateType.COMPLETE */, data: 'done' })); + } + } + #storeParsedFileData(file, data, insights) { + file.traceParsedData = data; + file.traceInsights = insights; + this.#lastRecordingIndex++; + let recordingName = `Trace ${this.#lastRecordingIndex}`; + let origin = null; + if (file.traceParsedData) { + origin = Helpers.Trace.extractOriginFromTrace(file.traceParsedData.Meta.mainFrameURL); + if (origin) { + const nextSequenceForDomain = Platform.MapUtilities.getWithDefault(this.#nextNumberByDomain, origin, () => 1); + recordingName = `${origin} (${nextSequenceForDomain})`; + this.#nextNumberByDomain.set(origin, nextSequenceForDomain + 1); + } + } + this.#recordingsAvailable.push(recordingName); + } + /** + * Returns the parsed trace data indexed by the order in which it was stored. + * If no index is given, the last stored parsed data is returned. + */ + traceParsedData(index = this.#traces.length - 1) { + if (!this.#traces[index]) { + return null; + } + return this.#traces[index].traceParsedData; + } + traceInsights(index = this.#traces.length - 1) { + if (!this.#traces[index]) { + return null; + } + return this.#traces[index].traceInsights; + } + metadata(index) { + if (!this.#traces[index]) { + return null; + } + return this.#traces[index].metadata; + } + overrideAnnotations(index, newAnnotations) { + if (this.#traces[index]) { + this.#traces[index].metadata.annotations = newAnnotations; + } + } + traceEvents(index) { + if (!this.#traces[index]) { + return null; + } + return this.#traces[index].traceEvents; + } + size() { + return this.#traces.length; + } + deleteTraceByIndex(recordingIndex) { + this.#traces.splice(recordingIndex, 1); + this.#recordingsAvailable.splice(recordingIndex, 1); + } + getRecordingsAvailable() { + return this.#recordingsAvailable; + } + resetProcessor() { + this.#processor.reset(); + } +} +export class ModelUpdateEvent extends Event { + data; + static eventName = 
'modelupdate'; + constructor(data) { + super(ModelUpdateEvent.eventName); + this.data = data; + } +} +export function isModelUpdateDataComplete(eventData) { + return eventData.type === "COMPLETE" /* ModelUpdateType.COMPLETE */; +} +export function isModelUpdateDataProgress(eventData) { + return eventData.type === "PROGRESS_UPDATE" /* ModelUpdateType.PROGRESS_UPDATE */; +} +//# sourceMappingURL=ModelImpl.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/Processor.js b/node_modules/@paulirish/trace_engine/models/trace/Processor.js new file mode 100644 index 000000000..c6029f7a8 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/Processor.js @@ -0,0 +1,269 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Handlers from './handlers/handlers.js'; +import * as Insights from './insights/insights.js'; +import * as Types from './types/types.js'; +export class TraceParseProgressEvent extends Event { + data; + static eventName = 'traceparseprogress'; + constructor(data, init = { bubbles: true }) { + super(TraceParseProgressEvent.eventName, init); + this.data = data; + } +} +export class TraceProcessor extends EventTarget { + // We force the Meta handler to be enabled, so the TraceHandlers type here is + // the model handlers the user passes in and the Meta handler. 
+ #traceHandlers; + #status = "IDLE" /* Status.IDLE */; + #modelConfiguration = Types.Configuration.defaults(); + #data = null; + #insights = null; + static createWithAllHandlers() { + return new TraceProcessor(Handlers.ModelHandlers, Types.Configuration.defaults()); + } + constructor(traceHandlers, modelConfiguration) { + super(); + this.#verifyHandlers(traceHandlers); + this.#traceHandlers = { + Meta: Handlers.ModelHandlers.Meta, + ...traceHandlers, + }; + if (modelConfiguration) { + this.#modelConfiguration = modelConfiguration; + } + this.#passConfigToHandlers(); + } + #passConfigToHandlers() { + for (const handler of Object.values(this.#traceHandlers)) { + // Bit of an odd double check, but without this TypeScript refuses to let + // you call the function as it thinks it might be undefined. + if ('handleUserConfig' in handler && handler.handleUserConfig) { + handler.handleUserConfig(this.#modelConfiguration); + } + } + } + /** + * When the user passes in a set of handlers, we want to ensure that we have all + * the required handlers. Handlers can depend on other handlers, so if the user + * passes in FooHandler which depends on BarHandler, they must also pass in + * BarHandler too. This method verifies that all dependencies are met, and + * throws if not. + **/ + #verifyHandlers(providedHandlers) { + // Tiny optimisation: if the amount of provided handlers matches the amount + // of handlers in the Handlers.ModelHandlers object, that means that the + // user has passed in every handler we have. So therefore they cannot have + // missed any, and there is no need to iterate through the handlers and + // check the dependencies. 
+ if (Object.keys(providedHandlers).length === Object.keys(Handlers.ModelHandlers).length) { + return; + } + const requiredHandlerKeys = new Set(); + for (const [handlerName, handler] of Object.entries(providedHandlers)) { + requiredHandlerKeys.add(handlerName); + for (const depName of (handler.deps?.() || [])) { + requiredHandlerKeys.add(depName); + } + } + const providedHandlerKeys = new Set(Object.keys(providedHandlers)); + // We always force the Meta handler to be enabled when creating the + // Processor, so if it is missing from the set the user gave us that is OK, + // as we will have enabled it anyway. + requiredHandlerKeys.delete('Meta'); + for (const requiredKey of requiredHandlerKeys) { + if (!providedHandlerKeys.has(requiredKey)) { + throw new Error(`Required handler ${requiredKey} not provided.`); + } + } + } + reset() { + if (this.#status === "PARSING" /* Status.PARSING */) { + throw new Error('Trace processor can\'t reset while parsing.'); + } + const handlers = Object.values(this.#traceHandlers); + for (const handler of handlers) { + handler.reset(); + } + this.#data = null; + this.#insights = null; + this.#status = "IDLE" /* Status.IDLE */; + } + async parse(traceEvents, freshRecording = false) { + if (this.#status !== "IDLE" /* Status.IDLE */) { + throw new Error(`Trace processor can't start parsing when not idle. Current state: ${this.#status}`); + } + try { + this.#status = "PARSING" /* Status.PARSING */; + await this.#parse(traceEvents, freshRecording); + this.#status = "FINISHED_PARSING" /* Status.FINISHED_PARSING */; + } + catch (e) { + this.#status = "ERRORED_WHILE_PARSING" /* Status.ERRORED_WHILE_PARSING */; + throw e; + } + } + async #parse(traceEvents, freshRecording) { + /** + * We want to yield regularly to maintain responsiveness. If we yield too often, we're wasting idle time. + * We could do this by checking `performance.now()` regularly, but it's an expensive call in such a hot loop. 
+ * `eventsPerChunk` is an approximated proxy metric. + * But how big a chunk? We're aiming for long tasks that are no smaller than 100ms and not bigger than 200ms. + * It's CPU dependent, so it should be calibrated on oldish hardware. + * Illustration of a previous change to `eventsPerChunk`: https://imgur.com/wzp8BnR + */ + const eventsPerChunk = 50_000; + // Convert to array so that we are able to iterate all handlers multiple times. + const sortedHandlers = [...sortHandlers(this.#traceHandlers).values()]; + // Reset. + for (const handler of sortedHandlers) { + handler.reset(); + } + // Initialize. + for (const handler of sortedHandlers) { + handler.initialize?.(freshRecording); + } + // Handle each event. + for (let i = 0; i < traceEvents.length; ++i) { + // Every so often we take a break just to render. + if (i % eventsPerChunk === 0 && i) { + // Take the opportunity to provide status update events. + this.dispatchEvent(new TraceParseProgressEvent({ index: i, total: traceEvents.length })); + // TODO(paulirish): consider using `scheduler.yield()` or `scheduler.postTask(() => {}, {priority: 'user-blocking'})` + await new Promise(resolve => setTimeout(resolve, 0)); + } + const event = traceEvents[i]; + for (let j = 0; j < sortedHandlers.length; ++j) { + sortedHandlers[j].handleEvent(event); + } + } + // Finalize. + for (const handler of sortedHandlers) { + await handler.finalize?.(); + } + } + get traceParsedData() { + if (this.#status !== "FINISHED_PARSING" /* Status.FINISHED_PARSING */) { + return null; + } + if (this.#data) { + return this.#data; + } + // Handlers that depend on other handlers do so via .data(), which used to always + // return a shallow clone of its internal data structures. However, that pattern + // easily results in egregious amounts of allocation. Now .data() does not do any + // cloning, and it happens here instead so that users of the trace processor may + // still assume that the parsed data is theirs. 
+ // See: crbug/41484172 + const shallowClone = (value, recurse = true) => { + if (value instanceof Map) { + return new Map(value); + } + if (value instanceof Set) { + return new Set(value); + } + if (Array.isArray(value)) { + return [...value]; + } + if (typeof value === 'object' && value && recurse) { + const obj = {}; + for (const [key, v] of Object.entries(value)) { + obj[key] = shallowClone(v, false); + } + return obj; + } + return value; + }; + const traceParsedData = {}; + for (const [name, handler] of Object.entries(this.#traceHandlers)) { + const data = shallowClone(handler.data()); + Object.assign(traceParsedData, { [name]: data }); + } + this.#data = traceParsedData; + return this.#data; + } + #getEnabledInsightRunners(traceParsedData) { + const enabledInsights = {}; + for (const [name, insight] of Object.entries(Insights.InsightRunners)) { + const deps = insight.deps(); + if (deps.some(dep => !traceParsedData[dep])) { + continue; + } + Object.assign(enabledInsights, { [name]: insight.generateInsight }); + } + return enabledInsights; + } + get insights() { + if (!this.traceParsedData) { + return null; + } + if (this.#insights) { + return this.#insights; + } + this.#insights = new Map(); + const enabledInsightRunners = this.#getEnabledInsightRunners(this.traceParsedData); + for (const nav of this.traceParsedData.Meta.mainFrameNavigations) { + if (!nav.args.frame || !nav.args.data?.navigationId) { + continue; + } + const context = { + frameId: nav.args.frame, + navigationId: nav.args.data.navigationId, + }; + const navInsightData = {}; + for (const [name, generateInsight] of Object.entries(enabledInsightRunners)) { + let insightResult; + try { + insightResult = generateInsight(this.traceParsedData, context); + } + catch (err) { + insightResult = err; + } + Object.assign(navInsightData, { [name]: insightResult }); + } + this.#insights.set(context.navigationId, navInsightData); + } + return this.#insights; + } +} +/** + * Some Handlers need data provided by 
others. Dependencies of a handler handler are + * declared in the `deps` field. + * @returns A map from trace event handler name to trace event hander whose entries + * iterate in such a way that each handler is visited after its dependencies. + */ +export function sortHandlers(traceHandlers) { + const sortedMap = new Map(); + const visited = new Set(); + const visitHandler = (handlerName) => { + if (sortedMap.has(handlerName)) { + return; + } + if (visited.has(handlerName)) { + let stackPath = ''; + for (const handler of visited) { + if (stackPath || handler === handlerName) { + stackPath += `${handler}->`; + } + } + stackPath += handlerName; + throw new Error(`Found dependency cycle in trace event handlers: ${stackPath}`); + } + visited.add(handlerName); + const handler = traceHandlers[handlerName]; + if (!handler) { + return; + } + const deps = handler.deps?.(); + if (deps) { + deps.forEach(visitHandler); + } + sortedMap.set(handlerName, handler); + }; + for (const handlerName of Object.keys(traceHandlers)) { + visitHandler(handlerName); + } + return sortedMap; +} +//# sourceMappingURL=Processor.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/TracingManager.js b/node_modules/@paulirish/trace_engine/models/trace/TracingManager.js new file mode 100644 index 000000000..cb0ff5c3b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/TracingManager.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@paulirish/trace_engine/models/trace/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/bundle-tsconfig.json new file mode 100644 index 000000000..fa860a0f8 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/bundle-tsconfig.json @@ -0,0 +1 @@ 
+{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../front_end/models/trace"},"files":["../../../../../../front_end/models/trace/trace.ts"],"references":[{"path":"./trace-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..db16ae879 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../front_end/models/trace", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../front_end/models/trace/trace.ts", + "../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./trace-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/FetchNodes.js b/node_modules/@paulirish/trace_engine/models/trace/extras/FetchNodes.js new 
file mode 100644 index 000000000..f19803f8e --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/FetchNodes.js @@ -0,0 +1,209 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as SDK from '../../../core/sdk/sdk.js'; +import * as Types from '../types/types.js'; +const domLookUpSingleNodeCache = new Map(); +const domLookUpBatchNodesCache = new Map(); +export function clearCacheForTesting() { + domLookUpSingleNodeCache.clear(); + domLookUpBatchNodesCache.clear(); + layoutShiftSourcesCache.clear(); + normalizedLayoutShiftNodesCache.clear(); +} +/** + * Looks up the DOM Node on the page for the given BackendNodeId. Uses the + * provided TraceParseData as the cache and will cache the result after the + * first lookup. + */ +export async function domNodeForBackendNodeID(modelData, nodeId) { + const fromCache = domLookUpSingleNodeCache.get(modelData)?.get(nodeId); + if (fromCache !== undefined) { + return fromCache; + } + const target = SDK.TargetManager.TargetManager.instance().primaryPageTarget(); + const domModel = target?.model(SDK.DOMModel.DOMModel); + if (!domModel) { + return null; + } + const domNodesMap = await domModel.pushNodesByBackendIdsToFrontend(new Set([nodeId])); + const result = domNodesMap?.get(nodeId) || null; + const cacheForModel = domLookUpSingleNodeCache.get(modelData) || new Map(); + cacheForModel.set(nodeId, result); + domLookUpSingleNodeCache.set(modelData, cacheForModel); + return result; +} +const nodeIdsForEventCache = new WeakMap(); +/** + * Extracts a set of NodeIds for a given event. + * NOTE: you probably don't want to call this and instead use + * `extractRelatedDOMNodesFromEvent`, which will fetch the nodes over CDP. + * This method is primarily exported so we can test the logic more easily + * without having to mock the CDP layer. 
+ **/ +export function nodeIdsForEvent(modelData, event) { + const fromCache = nodeIdsForEventCache.get(event); + if (fromCache) { + return fromCache; + } + const foundIds = new Set(); + if (Types.TraceEvents.isTraceEventLayout(event)) { + event.args.endData.layoutRoots.forEach(root => foundIds.add(root.nodeId)); + } + else if (Types.TraceEvents.isSyntheticLayoutShift(event) && event.args.data?.impacted_nodes) { + event.args.data.impacted_nodes.forEach(node => foundIds.add(node.node_id)); + } + else if (Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(event) && + typeof event.args.data?.nodeId !== 'undefined') { + foundIds.add(event.args.data.nodeId); + } + else if (Types.TraceEvents.isTraceEventPaint(event) && typeof event.args.data.nodeId !== 'undefined') { + foundIds.add(event.args.data.nodeId); + } + else if (Types.TraceEvents.isTraceEventPaintImage(event) && typeof event.args.data.nodeId !== 'undefined') { + foundIds.add(event.args.data.nodeId); + } + else if (Types.TraceEvents.isTraceEventScrollLayer(event) && typeof event.args.data.nodeId !== 'undefined') { + foundIds.add(event.args.data.nodeId); + } + else if (Types.TraceEvents.isTraceEventDecodeImage(event)) { + // For a DecodeImage event, we can use the ImagePaintingHandler, which has + // done the work to build the relationship between a DecodeImage event and + // the corresponding PaintImage event. 
+ const paintImageEvent = modelData.ImagePainting.paintImageForEvent.get(event); + if (paintImageEvent && typeof paintImageEvent.args.data.nodeId !== 'undefined') { + foundIds.add(paintImageEvent.args.data.nodeId); + } + } + else if (Types.TraceEvents.isTraceEventDrawLazyPixelRef(event) && event.args?.LazyPixelRef) { + const paintImageEvent = modelData.ImagePainting.paintImageByDrawLazyPixelRef.get(event.args.LazyPixelRef); + if (paintImageEvent && typeof paintImageEvent.args.data.nodeId !== 'undefined') { + foundIds.add(paintImageEvent.args.data.nodeId); + } + } + nodeIdsForEventCache.set(event, foundIds); + return foundIds; +} +/** + * Looks up for backend node ids in different types of trace events + * and resolves them into related DOM nodes. + * This method should be progressively updated to support more events + * containing node ids which we want to resolve. + */ +export async function extractRelatedDOMNodesFromEvent(modelData, event) { + const nodeIds = nodeIdsForEvent(modelData, event); + if (nodeIds.size) { + return domNodesForMultipleBackendNodeIds(modelData, Array.from(nodeIds)); + } + return null; +} +/** + * Takes a set of Protocol.DOM.BackendNodeId ids and will return a map of NodeId=>DOMNode. + * Results are cached based on 1) the provided TraceParseData and 2) the provided set of IDs. 
+ */ +export async function domNodesForMultipleBackendNodeIds(modelData, nodeIds) { + const fromCache = domLookUpBatchNodesCache.get(modelData)?.get(nodeIds); + if (fromCache) { + return fromCache; + } + const target = SDK.TargetManager.TargetManager.instance().primaryPageTarget(); + const domModel = target?.model(SDK.DOMModel.DOMModel); + if (!domModel) { + return new Map(); + } + const domNodesMap = await domModel.pushNodesByBackendIdsToFrontend(new Set(nodeIds)) || new Map(); + const cacheForModel = domLookUpBatchNodesCache.get(modelData) || + new Map(); + cacheForModel.set(nodeIds, domNodesMap); + domLookUpBatchNodesCache.set(modelData, cacheForModel); + return domNodesMap; +} +const layoutShiftSourcesCache = new Map(); +const normalizedLayoutShiftNodesCache = new Map(); +/** + * Calculates and returns a list of sources for a LayoutShift. + * Here, a source is considered as a node that moved and contributed to the + * given LayoutShift existing and the score it was given. Each source returned + * contains a reference to the DOM Node, and its dimensions (as a DOMRect), both + * before and now, so we can see how this node changed and how that impacted the + * layout shift. + * + * This data is cached based on the provided model data and the given layout + * shift, so it is is safe to call multiple times with the same input. 
+ */ +export async function sourcesForLayoutShift(modelData, event) { + const fromCache = layoutShiftSourcesCache.get(modelData)?.get(event); + if (fromCache) { + return fromCache; + } + const impactedNodes = event.args.data?.impacted_nodes; + if (!impactedNodes) { + return []; + } + const sources = []; + await Promise.all(impactedNodes.map(async (node) => { + const domNode = await domNodeForBackendNodeID(modelData, node.node_id); + if (domNode) { + sources.push({ + previousRect: new DOMRect(node.old_rect[0], node.old_rect[1], node.old_rect[2], node.old_rect[3]), + currentRect: new DOMRect(node.new_rect[0], node.new_rect[1], node.new_rect[2], node.new_rect[3]), + node: domNode, + }); + } + })); + const cacheForModel = layoutShiftSourcesCache.get(modelData) || new Map(); + cacheForModel.set(event, sources); + layoutShiftSourcesCache.set(modelData, cacheForModel); + return sources; +} +/** + * Takes a LayoutShift and normalizes its node dimensions based on the device + * pixel ratio (DPR) of the user's display. + * This is required because the Layout Instability API is not based on CSS + * pixels, but physical pixels. Therefore we need to map these to normalized CSS + * pixels if we can. For example, if the user is on a device with a DPR of 2, + * the values of the node dimensions reported by the Instability API need to be + * divided by 2 to be accurate. + * This function is safe to call multiple times as results are cached based on + * the provided model data. + * See https://crbug.com/1300309 for details. 
+ */ +export async function normalizedImpactedNodesForLayoutShift(modelData, event) { + const fromCache = normalizedLayoutShiftNodesCache.get(modelData)?.get(event); + if (fromCache) { + return fromCache; + } + const impactedNodes = event.args?.data?.impacted_nodes; + if (!impactedNodes) { + return []; + } + let viewportScale = null; + const target = SDK.TargetManager.TargetManager.instance().primaryPageTarget(); + // Get the CSS-to-physical pixel ratio of the device the inspected + // target is running at. + const evaluateResult = await target?.runtimeAgent().invoke_evaluate({ expression: 'window.devicePixelRatio' }); + if (evaluateResult?.result.type === 'number') { + viewportScale = evaluateResult?.result.value ?? null; + } + if (!viewportScale) { + // Bail and return the nodes as is. + return impactedNodes; + } + const normalizedNodes = []; + for (const impactedNode of impactedNodes) { + const newNode = { ...impactedNode }; + for (let i = 0; i < impactedNode.old_rect.length; i++) { + newNode.old_rect[i] /= viewportScale; + } + for (let i = 0; i < impactedNode.new_rect.length; i++) { + newNode.new_rect[i] /= viewportScale; + } + normalizedNodes.push(newNode); + } + const cacheForModel = normalizedLayoutShiftNodesCache.get(modelData) || + new Map(); + cacheForModel.set(event, normalizedNodes); + normalizedLayoutShiftNodesCache.set(modelData, cacheForModel); + return normalizedNodes; +} +//# sourceMappingURL=FetchNodes.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/FilmStrip.js b/node_modules/@paulirish/trace_engine/models/trace/extras/FilmStrip.js new file mode 100644 index 000000000..2970752c5 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/FilmStrip.js @@ -0,0 +1,44 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+// +import * as Platform from '../../../core/platform/platform.js'; +// Cache film strips based on: +// 1. The trace parsed data object +// 2. The start time. +const filmStripCache = new Map(); +export function fromTraceData(traceData, customZeroTime) { + const frames = []; + const zeroTime = typeof customZeroTime !== 'undefined' ? customZeroTime : traceData.Meta.traceBounds.min; + const spanTime = traceData.Meta.traceBounds.range; + const fromCache = filmStripCache.get(traceData)?.get(zeroTime); + if (fromCache) { + return fromCache; + } + for (const screenshotEvent of traceData.Screenshots) { + if (screenshotEvent.ts < zeroTime) { + continue; + } + const frame = { + index: frames.length, + screenshotEvent: screenshotEvent, + }; + frames.push(frame); + } + const result = { + zeroTime, + spanTime, + frames: Array.from(frames), + }; + const cachedForData = Platform.MapUtilities.getWithDefault(filmStripCache, traceData, () => new Map()); + cachedForData.set(zeroTime, result); + return result; +} +export function frameClosestToTimestamp(filmStrip, searchTimestamp) { + const closestFrameIndexBeforeTimestamp = Platform.ArrayUtilities.nearestIndexFromEnd(filmStrip.frames, frame => frame.screenshotEvent.ts < searchTimestamp); + if (closestFrameIndexBeforeTimestamp === null) { + return null; + } + return filmStrip.frames[closestFrameIndexBeforeTimestamp]; +} +//# sourceMappingURL=FilmStrip.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/MainThreadActivity.js b/node_modules/@paulirish/trace_engine/models/trace/extras/MainThreadActivity.js new file mode 100644 index 000000000..65cf2bca8 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/MainThreadActivity.js @@ -0,0 +1,77 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +const IDLE_FUNCTION_CALL_NAMES = new Set([ + '(program)', + '(idle)', + '(root)', +]); +export function calculateWindow(traceBounds, mainThreadEntries) { + if (!mainThreadEntries.length) { + return traceBounds; + } + const entriesWithIdleRemoved = mainThreadEntries.filter(entry => { + if (Types.TraceEvents.isProfileCall(entry) && + (IDLE_FUNCTION_CALL_NAMES.has(entry.callFrame.functionName) || !entry.callFrame.functionName)) { + return false; + } + return true; + }); + if (entriesWithIdleRemoved.length === 0) { + return traceBounds; + } + /** + * Calculates regions of low utilization and returns the index of the event + * that is the first event that should be included. + **/ + function findLowUtilizationRegion(startIndex, stopIndex) { + const threshold = 0.1; + let cutIndex = startIndex; + const entryAtCut = entriesWithIdleRemoved[cutIndex]; + const timings = Helpers.Timing.eventTimingsMicroSeconds(entryAtCut); + let cutTime = (timings.startTime + timings.endTime) / 2; + let usedTime = 0; + const step = Math.sign(stopIndex - startIndex); + for (let i = startIndex; i !== stopIndex; i += step) { + const task = entriesWithIdleRemoved[i]; + const taskTimings = Helpers.Timing.eventTimingsMicroSeconds(task); + const taskTime = (taskTimings.startTime + taskTimings.endTime) / 2; + const interval = Math.abs(cutTime - taskTime); + if (usedTime < threshold * interval) { + cutIndex = i; + cutTime = taskTime; + usedTime = 0; + } + usedTime += taskTimings.duration; + } + return cutIndex; + } + const rightIndex = findLowUtilizationRegion(entriesWithIdleRemoved.length - 1, 0); + const leftIndex = findLowUtilizationRegion(0, rightIndex); + const leftTimings = Helpers.Timing.eventTimingsMicroSeconds(entriesWithIdleRemoved[leftIndex]); + const rightTimings = Helpers.Timing.eventTimingsMicroSeconds(entriesWithIdleRemoved[rightIndex]); + let leftTime = leftTimings.startTime; + let 
rightTime = rightTimings.endTime; + const zoomedInSpan = rightTime - leftTime; + if (zoomedInSpan < traceBounds.range * 0.1) { + // If the area we have chosen to zoom into is less than 10% of the entire + // span, we bail and show the entire trace. It would not be so useful to + // the user to zoom in on such a small area; we assume they have + // purposefully recorded a trace that contains empty periods of time. + return traceBounds; + } + // Adjust the left time down by 5%, and the right time up by 5%, so that + // we give the range we want to zoom a bit of breathing space. At the + // same time, ensure that we do not stray beyond the bounds of the + // min/max time of the entire trace. + leftTime = Types.Timing.MicroSeconds(Math.max(leftTime - 0.05 * zoomedInSpan, traceBounds.min)); + rightTime = Types.Timing.MicroSeconds(Math.min(rightTime + 0.05 * zoomedInSpan, traceBounds.max)); + return { + min: leftTime, + max: rightTime, + range: Types.Timing.MicroSeconds(rightTime - leftTime), + }; +} +//# sourceMappingURL=MainThreadActivity.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/Metadata.js b/node_modules/@paulirish/trace_engine/models/trace/extras/Metadata.js new file mode 100644 index 000000000..fc88c1787 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/Metadata.js @@ -0,0 +1,52 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as SDK from '../../../core/sdk/sdk.js'; +import * as Types from '../types/types.js'; +export async function forNewRecording(isCpuProfile, recordStartTime) { + try { + if (isCpuProfile) { + // For CPU profile, only specify data origin + return { + dataOrigin: "CPUProfile" /* Types.File.DataOrigin.CPUProfile */, + }; + } + const cpuThrottlingManager = SDK.CPUThrottlingManager.CPUThrottlingManager.instance(); + // If the CPU Throttling manager has yet to have its primary page target + // set, it will block on the call to get the current hardware concurrency + // until it does. At this point where the user has recorded a trace, that + // target should have been set. So if it doesn't have it set, we instead + // just bail and don't store the hardware concurrency (this is only + // metadata, not mission critical information). + // We also race this call against a 1s timeout, because sometimes this call + // can hang (unsure exactly why) and we do not want to block parsing for + // too long as a result. + function getConcurrencyOrTimeout() { + return Promise.race([ + SDK.CPUThrottlingManager.CPUThrottlingManager.instance().getHardwareConcurrency(), + new Promise(resolve => { + setTimeout(() => resolve(undefined), 1_000); + }), + ]); + } + const hardwareConcurrency = cpuThrottlingManager.hasPrimaryPageTargetSet() ? await getConcurrencyOrTimeout() : undefined; + const cpuThrottling = SDK.CPUThrottlingManager.CPUThrottlingManager.instance().cpuThrottlingRate(); + const networkConditions = SDK.NetworkManager.MultitargetNetworkManager.instance().networkConditions(); + const networkTitle = typeof networkConditions.title === 'function' ? networkConditions.title() : networkConditions.title; + return { + source: 'DevTools', + startTime: recordStartTime ? 
new Date(recordStartTime).toJSON() : undefined, // ISO-8601 timestamp + cpuThrottling, + networkThrottling: networkTitle, + hardwareConcurrency, + dataOrigin: "TraceEvents" /* Types.File.DataOrigin.TraceEvents */, + }; + } + catch { + // If anything went wrong, it does not really matter. The impact is that we + // will not save the metadata when we save the trace to disk, but that is + // not really important, so just return empty object and move on + return {}; + } +} +//# sourceMappingURL=Metadata.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/extras/bundle-tsconfig.json new file mode 100644 index 000000000..417d848d7 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../../front_end/models/trace/extras"},"files":["../../../../../../../front_end/models/trace/extras/extras.ts"],"references":[{"path":"./extras-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/extras/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..9030a98f5 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + 
"noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/extras", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/extras/extras.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./extras-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/extras-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/extras/extras-tsconfig.json new file mode 100644 index 000000000..22077768b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/extras-tsconfig.json @@ -0,0 +1,58 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/extras", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "extras-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/extras/FetchNodes.ts", + 
"../../../../../../../front_end/models/trace/extras/FilmStrip.ts", + "../../../../../../../front_end/models/trace/extras/MainThreadActivity.ts", + "../../../../../../../front_end/models/trace/extras/Metadata.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../../core/platform/bundle-tsconfig.json" + }, + { + "path": "../../../core/sdk/bundle-tsconfig.json" + }, + { + "path": "../../../generated/protocol-tsconfig.json" + }, + { + "path": "../handlers/bundle-tsconfig.json" + }, + { + "path": "../types/bundle-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/extras/extras.js b/node_modules/@paulirish/trace_engine/models/trace/extras/extras.js new file mode 100644 index 000000000..cb0ff5c3b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/extras/extras.js @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/AnimationHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/AnimationHandler.js new file mode 100644 index 000000000..abb8e977f --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/AnimationHandler.js @@ -0,0 +1,32 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +const animations = []; +const animationsSyntheticEvents = []; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +export function reset() { + animations.length = 0; + animationsSyntheticEvents.length = 0; +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventAnimation(event)) { + animations.push(event); + return; + } +} +export async function finalize() { + const syntheticEvents = Helpers.Trace.createMatchedSortedSyntheticEvents(animations); + animationsSyntheticEvents.push(...syntheticEvents); + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Animation handler is not finalized'); + } + return { + animations: animationsSyntheticEvents, + }; +} +//# sourceMappingURL=AnimationHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/AuctionWorkletsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/AuctionWorkletsHandler.js new file mode 100644 index 000000000..ee39f63cd --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/AuctionWorkletsHandler.js @@ -0,0 +1,161 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +/** + * There are two metadata events that we care about. + * => AuctionWorkletRunningInProcess tells us which process the Auction Worklet + * has taken to run in. + * => AuctionWorkletDoneWithProcess tells us when the worklet is done with that + * process. This is less useful - but in the future we might want to surface + * this information so we still parse and return the event. 
+ * + * It is important to note that the top level PID on these events is NOT the + * PID that the worklet is running on; instead we have to look at its + * args.data.pid property, which is the PID of the process that it is running + * on. + * + * For any given RunningInProcess event, we would typically expect to see a + * DoneWithProcess event, however this is not guaranteed, especially as users + * can record any chunk of time in DevTools. + * + * Similarly, it is also possible to see a DoneWithProcess event without a + * RunningInProcess event, if the user started recording after the auction + * worklets started. Therefore we are happy to create + * SyntheticAuctionWorkletEvents as long as we see just one of these events. + * + * If we do get two events and need to pair them, we can use the + * args.data.target property, which is a string ID shared by both + * events. + */ +const runningInProcessEvents = new Map(); +const doneWithProcessEvents = new Map(); +// Keyed by the PID defined in `args.data.pid` on AuctionWorklet trace events.. +const createdSyntheticEvents = new Map(); +// Each AuctonWorklet takes over a process and has 2 threads (that we care +// about and want to show as tracks): +// 1. A CrUtilityMain thread which is known as the "control process". +// 2. A AuctionV8HelperThread which is the actual auction worklet and will be +// either a "Seller" or a "Bidder" +// To detect these we look for the metadata thread_name events. We key these by +// PID so that we can easily look them up later without having to loop through. 
+const utilityThreads = new Map(); +const v8HelperThreads = new Map(); +export function reset() { + runningInProcessEvents.clear(); + doneWithProcessEvents.clear(); + createdSyntheticEvents.clear(); + utilityThreads.clear(); + v8HelperThreads.clear(); +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventAuctionWorkletRunningInProcess(event)) { + runningInProcessEvents.set(event.args.data.pid, event); + return; + } + if (Types.TraceEvents.isTraceEventAuctionWorkletDoneWithProcess(event)) { + doneWithProcessEvents.set(event.args.data.pid, event); + return; + } + if (Types.TraceEvents.isThreadName(event)) { + if (event.args.name === 'auction_worklet.CrUtilityMain') { + utilityThreads.set(event.pid, event); + return; + } + if (event.args.name === 'AuctionV8HelperThread') { + v8HelperThreads.set(event.pid, event); + } + } +} +function workletType(input) { + switch (input) { + case 'seller': + return "seller" /* Types.TraceEvents.AuctionWorkletType.SELLER */; + case 'bidder': + return "bidder" /* Types.TraceEvents.AuctionWorkletType.BIDDER */; + default: + return "unknown" /* Types.TraceEvents.AuctionWorkletType.UNKNOWN */; + } +} +/** + * We cannot make the full event without knowing the type of event, but we can + * create everything other than the `args` field, as those are identical + * regardless of the type of event. + */ +function makeSyntheticEventBase(event) { + return { + rawSourceEvent: event, + name: 'SyntheticAuctionWorkletEvent', + s: "t" /* Types.TraceEvents.TraceEventScope.THREAD */, + cat: event.cat, + tid: event.tid, + ts: event.ts, + ph: "I" /* Types.TraceEvents.Phase.INSTANT */, + pid: event.args.data.pid, + host: event.args.data.host, + target: event.args.data.target, + type: workletType(event.args.data.type), + }; +} +export async function finalize() { + // Loop through the utility threads we found to create the worklet events. 
We + // expect each worklet to have a utility thread, so we can use them as the + // root of our list of worklets. + for (const [pid, utilityThreadNameEvent] of utilityThreads) { + const v8HelperEvent = v8HelperThreads.get(pid); + if (!v8HelperEvent) { + // Bad trace data - AuctionWorklets are expected to always have both threads. + continue; + } + const runningEvent = runningInProcessEvents.get(pid); + const doneWithEvent = doneWithProcessEvents.get(pid); + // We can create a worklet from either the runningEvent or doneWithEvent - + // we do not need both. We cannot express that to TypeScript with an early + // return here, so instead we set the event initially to null, and then + // create it from either the running event or the doneWith event. If it is + // still null after this, that means neither event was found, and we drop + // the worklet as we do not have enough information to create the synthetic + // event. + let syntheticEvent = null; + if (runningEvent) { + syntheticEvent = { + ...makeSyntheticEventBase(runningEvent), + args: { + data: { + runningInProcessEvent: runningEvent, + utilityThread: utilityThreadNameEvent, + v8HelperThread: v8HelperEvent, + }, + }, + }; + if (doneWithEvent) { + syntheticEvent.args.data.doneWithProcessEvent = doneWithEvent; + } + } + else if (doneWithEvent) { + syntheticEvent = { + ...makeSyntheticEventBase(doneWithEvent), + args: { + data: { + doneWithProcessEvent: doneWithEvent, + utilityThread: utilityThreadNameEvent, + v8HelperThread: v8HelperEvent, + }, + }, + }; + if (runningEvent) { + syntheticEvent.args.data.runningInProcessEvent = runningEvent; + } + } + if (syntheticEvent === null) { + continue; + } + createdSyntheticEvents.set(pid, syntheticEvent); + } +} +export function data() { + return { + worklets: new Map(createdSyntheticEvents), + }; +} +//# sourceMappingURL=AuctionWorkletsHandler.js.map \ No newline at end of file diff --git 
// Copyright 2024 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Synthetic flame-chart entries built from extension-annotated user timings.
const extensionFlameChartEntries = [];
// Track data grouped from the flame-chart entries above.
const extensionTrackData = [];
// Point-in-time marker annotations contributed by extensions.
const extensionMarkers = [];
let handlerState = 1 /* HandlerState.UNINITIALIZED */;

/**
 * Intentionally a no-op: this handler sources all of its data from the
 * UserTimingsHandler during finalize(), so raw events are ignored here.
 */
export function handleEvent(_event) {
}

/** Clears accumulated state and marks the handler ready for a new trace. */
export function reset() {
    handlerState = 2 /* HandlerState.INITIALIZED */;
    extensionFlameChartEntries.length = 0;
    extensionTrackData.length = 0;
    extensionMarkers.length = 0;
}

/**
 * Builds all extension entries from the user timings data.
 * @throws {Error} if the handler was not reset first.
 */
export async function finalize() {
    if (handlerState !== 2 /* HandlerState.INITIALIZED */) {
        throw new Error('ExtensionTraceData handler is not initialized');
    }
    createExtensionFlameChartEntries();
    handlerState = 3 /* HandlerState.FINALIZED */;
}

// Merge measures and marks into timestamp order, pull out the
// extension-decorated ones, then group them into tracks.
function createExtensionFlameChartEntries() {
    const measures = userTimingsData().performanceMeasures;
    const marks = userTimingsData().performanceMarks;
    const sortedTimings = Helpers.Trace.mergeEventsInOrder(measures, marks);
    extractExtensionEntries(sortedTimings);
    Helpers.Extensions.buildTrackDataFromExtensionEntries(extensionFlameChartEntries, extensionTrackData);
}

/**
 * Scans user timings for DevTools-extension payloads and converts each match
 * into a synthetic complete ("X") trace event stored on this handler.
 * @param timings - merged performance measures and marks, in timestamp order.
 */
export function extractExtensionEntries(timings) {
    for (const timing of timings) {
        const payload = extensionDataInTiming(timing);
        if (!payload) {
            // Not an extension user timing.
            continue;
        }
        if (!payload.metadata.extensionName) {
            continue;
        }
        // Wrap the timing in a synthetic complete event carrying the payload.
        const syntheticEvent = {
            name: timing.name,
            ph: "X" /* Types.TraceEvents.Phase.COMPLETE */,
            pid: Types.TraceEvents.ProcessID(0),
            tid: Types.TraceEvents.ThreadID(0),
            ts: timing.ts,
            selfTime: Types.Timing.MicroSeconds(0),
            dur: timing.dur,
            cat: 'devtools.extension',
            args: payload,
        };
        if (Types.Extensions.isExtensionPayloadMarker(payload)) {
            extensionMarkers.push(syntheticEvent);
        }
        else if (Types.Extensions.isExtensionPayloadFlameChartEntry(payload)) {
            extensionFlameChartEntries.push(syntheticEvent);
        }
    }
}

/**
 * Extracts the DevTools extension payload from a user timing, if present.
 * @returns the `devtools` object from the timing's JSON detail, or null.
 */
export function extensionDataInTiming(timing) {
    const detail = Types.TraceEvents.isTraceEventPerformanceMark(timing) ?
        timing.args.data?.detail :
        timing.args.data.beginEvent.args.detail;
    if (!detail) {
        return null;
    }
    // Attempt to parse the detail as an object that might be coming from a
    // DevTools Perf extension.
    // Wrapped in a try-catch because the detail might either:
    // 1. Not be `JSON.parse`-able (it should, but just in case...)
    // 2. Not be an object - in which case the `in` check will error.
    // If we hit either of these cases, we just ignore this mark and move on.
    try {
        const parsed = JSON.parse(detail);
        if (!('devtools' in parsed)) {
            return null;
        }
        if (!('metadata' in parsed['devtools'])) {
            return null;
        }
        return parsed.devtools;
    }
    catch (e) {
        // Discard this event: it carries no usable extension information.
        return null;
    }
}

/**
 * Returns shallow copies of the collected extension tracks and markers.
 * @throws {Error} if finalize() has not run yet.
 */
export function data() {
    if (handlerState !== 3 /* HandlerState.FINALIZED */) {
        throw new Error('ExtensionTraceData handler is not finalized');
    }
    return {
        extensionTrackData: [...extensionTrackData],
        extensionMarkers: [...extensionMarkers],
    };
}

export function deps() {
    return ['UserTimings'];
}
let handlerState = 1 /* HandlerState.UNINITIALIZED */;
// Every event seen is buffered here; TimelineFrameModel consumes the whole
// batch in finalize() rather than processing events incrementally.
const allEvents = [];
let model = null;

/** Drops buffered events so a new trace can be parsed. */
export function reset() {
    handlerState = 1 /* HandlerState.UNINITIALIZED */;
    allEvents.length = 0;
}

export function initialize() {
    if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) {
        throw new Error('FramesHandler was not reset before being initialized');
    }
    handlerState = 2 /* HandlerState.INITIALIZED */;
}

/** Buffers the event for later consumption by the frame model. */
export function handleEvent(event) {
    allEvents.push(event);
}

export async function finalize() {
    if (handlerState !== 2 /* HandlerState.INITIALIZED */) {
        throw new Error('FramesHandler is not initialized');
    }
    // Snapshot events can be emitted out of order, so we need to sort before
    // building the frames model.
    Helpers.Trace.sortTraceEventsInPlace(allEvents);
    model = new TimelineFrameModel(allEvents, rendererHandlerData(), auctionWorkletsData(), metaHandlerData(), layerTreeHandlerData());
}

/** Returns the parsed frames; empty collections until finalize() has run. */
export function data() {
    if (!model) {
        return { frames: [], framesById: {} };
    }
    return {
        frames: Array.from(model.frames()),
        framesById: { ...model.framesById() },
    };
}

export function deps() {
    return ['Meta', 'Renderer', 'AuctionWorklets', 'LayerTree'];
}

// True for any compositor event that feeds the frame model.
// Note: "Commit" is the replacement for "CompositeLayers", so a single trace
// will only contain one of the two; both are accepted so that older traces
// still import correctly.
function isFrameEvent(event) {
    const predicates = [
        Types.TraceEvents.isTraceEventSetLayerId,
        Types.TraceEvents.isTraceEventBeginFrame,
        Types.TraceEvents.isTraceEventDroppedFrame,
        Types.TraceEvents.isTraceEventRequestMainThreadFrame,
        Types.TraceEvents.isTraceEventBeginMainThreadFrame,
        Types.TraceEvents.isTraceEventNeedsBeginFrameChanged,
        Types.TraceEvents.isTraceEventCommit,
        Types.TraceEvents.isTraceEventCompositeLayers,
        Types.TraceEvents.isTraceEventActivateLayerTree,
        Types.TraceEvents.isTraceEventDrawFrame,
    ];
    return predicates.some(matches => matches(event));
}

// A top-level entry is a RunTask in the devtools.timeline category.
function entryIsTopLevel(entry) {
    return entry.name === "RunTask" /* Types.TraceEvents.KnownEventName.RunTask */ &&
        entry.cat.includes('disabled-by-default-devtools.timeline');
}
// Adaptation of the legacy DevTools TimelineFrameModel: consumes the already
// sorted trace events and reconstructs the sequence of compositor frames
// (begin/draw/commit/activate) plus their main-thread contributions.
export class TimelineFrameModel {
    // All flushed frames, in start-time order.
    #frames = [];
    // Frames keyed by their main frame id (only frames that had one).
    #frameById = {};
    #beginFrameQueue = new TimelineFrameBeginFrameQueue();
    // The frame currently being accumulated; flushed when the next starts.
    #lastFrame = null;
    #mainFrameCommitted = false;
    #mainFrameRequested = false;
    #lastLayerTree = null;
    // Frame whose commit has happened but which is not yet activated.
    #framePendingActivation = null;
    // Main-thread work observed since the last MAIN_FRAME_MARKERS event.
    #framePendingCommit = null;
    #lastBeginFrame = null;
    #lastNeedsBeginFrame = null;
    #lastTaskBeginTime = null;
    #layerTreeId = null;
    #activeProcessId = null;
    #activeThreadId = null;
    #layerTreeData;
    constructor(allEvents, rendererData, auctionWorkletsData, metaData, layerTreeData) {
        // We only care about getting threads from the Renderer, not Samples,
        // because Frames don't exist in a CPU Profile (which won't have Renderer
        // threads.)
        const mainThreads = Threads.threadsInRenderer(rendererData, auctionWorkletsData).filter(thread => {
            return thread.type === "MAIN_THREAD" /* Threads.ThreadType.MAIN_THREAD */ && thread.processIsOnMainFrame;
        });
        const threadData = mainThreads.map(thread => {
            return {
                tid: thread.tid,
                pid: thread.pid,
                startTime: thread.entries[0].ts,
            };
        });
        this.#layerTreeData = layerTreeData;
        this.#addTraceEvents(allEvents, threadData, metaData.mainFrameId);
    }
    framesById() {
        return this.#frameById;
    }
    frames() {
        return this.#frames;
    }
    // Impl-side BeginFrame: queue it so a later DrawFrame can visualize it.
    #handleBeginFrame(startTime, seqId) {
        if (!this.#lastFrame) {
            this.#startFrame(startTime, seqId);
        }
        this.#lastBeginFrame = startTime;
        this.#beginFrameQueue.addFrameIfNotExists(seqId, startTime, false, false);
    }
    #handleDroppedFrame(startTime, seqId, isPartial) {
        if (!this.#lastFrame) {
            this.#startFrame(startTime, seqId);
        }
        // This line handles the case where no BeginFrame event is issued for
        // the dropped frame. In this situation, add a BeginFrame to the queue
        // as if it actually occurred.
        this.#beginFrameQueue.addFrameIfNotExists(seqId, startTime, true, isPartial);
        this.#beginFrameQueue.setDropped(seqId, true);
        this.#beginFrameQueue.setPartial(seqId, isPartial);
    }
    #handleDrawFrame(startTime, seqId) {
        if (!this.#lastFrame) {
            this.#startFrame(startTime, seqId);
            return;
        }
        // - if it wasn't drawn, it didn't happen!
        // - only show frames that either did not wait for the main thread frame or had one committed.
        if (this.#mainFrameCommitted || !this.#mainFrameRequested) {
            if (this.#lastNeedsBeginFrame) {
                const idleTimeEnd = this.#framePendingActivation ? this.#framePendingActivation.triggerTime :
                    (this.#lastBeginFrame || this.#lastNeedsBeginFrame);
                if (idleTimeEnd > this.#lastFrame.startTime) {
                    this.#lastFrame.idle = true;
                    this.#lastBeginFrame = null;
                }
                this.#lastNeedsBeginFrame = null;
            }
            const framesToVisualize = this.#beginFrameQueue.processPendingBeginFramesOnDrawFrame(seqId);
            // Visualize the current frame and all pending frames before it.
            for (const frame of framesToVisualize) {
                const isLastFrameIdle = this.#lastFrame.idle;
                // If |frame| is the first frame after an idle period, the CPU time
                // will be logged ("committed") under |frame| if applicable.
                this.#startFrame(frame.startTime, seqId);
                if (isLastFrameIdle && this.#framePendingActivation) {
                    this.#commitPendingFrame();
                }
                if (frame.isDropped) {
                    this.#lastFrame.dropped = true;
                }
                if (frame.isPartial) {
                    this.#lastFrame.isPartial = true;
                }
            }
        }
        this.#mainFrameCommitted = false;
    }
    #handleActivateLayerTree() {
        if (!this.#lastFrame) {
            return;
        }
        if (this.#framePendingActivation && !this.#lastNeedsBeginFrame) {
            this.#commitPendingFrame();
        }
    }
    #handleRequestMainThreadFrame() {
        if (!this.#lastFrame) {
            return;
        }
        this.#mainFrameRequested = true;
    }
    // Promote the pending-commit frame to pending-activation.
    #handleCommit() {
        if (!this.#framePendingCommit) {
            return;
        }
        this.#framePendingActivation = this.#framePendingCommit;
        this.#framePendingCommit = null;
        this.#mainFrameRequested = false;
        this.#mainFrameCommitted = true;
    }
    #handleLayerTreeSnapshot(layerTree) {
        this.#lastLayerTree = layerTree;
    }
    #handleNeedFrameChanged(startTime, needsBeginFrame) {
        if (needsBeginFrame) {
            this.#lastNeedsBeginFrame = startTime;
        }
    }
    // Flush the previous frame (if any) and open a new one at startTime.
    #startFrame(startTime, seqId) {
        if (this.#lastFrame) {
            this.#flushFrame(this.#lastFrame, startTime);
        }
        this.#lastFrame =
            new TimelineFrame(seqId, startTime, Types.Timing.MicroSeconds(startTime - metaHandlerData().traceBounds.min));
    }
    // Finalize a frame and append it to the output collections.
    #flushFrame(frame, endTime) {
        frame.setLayerTree(this.#lastLayerTree);
        frame.setEndTime(endTime);
        if (this.#lastLayerTree) {
            this.#lastLayerTree.paints = frame.paints;
        }
        const lastFrame = this.#frames[this.#frames.length - 1];
        // Frames are expected to be contiguous: each should start exactly where
        // the previous one ended.
        if (this.#frames.length && lastFrame &&
            (frame.startTime !== lastFrame.endTime || frame.startTime > frame.endTime)) {
            console.assert(false, `Inconsistent frame time for frame ${this.#frames.length} (${frame.startTime} - ${frame.endTime})`);
        }
        this.#frames.push(frame);
        if (typeof frame.mainFrameId === 'number') {
            this.#frameById[frame.mainFrameId] = frame;
        }
    }
    #commitPendingFrame() {
        if (!this.#framePendingActivation || !this.#lastFrame) {
            return;
        }
        this.#lastFrame.paints = this.#framePendingActivation.paints;
        this.#lastFrame.mainFrameId = this.#framePendingActivation.mainFrameId;
        this.#framePendingActivation = null;
    }
    // Walk events in order, tracking which renderer main thread is "active"
    // (threadData is in start-time order) so main-thread events can be filtered.
    #addTraceEvents(events, threadData, mainFrameId) {
        let j = 0;
        // NOTE(review): a tid/pid of 0 would fall back to null here due to the
        // `&& ... || null` chain — presumably ids are never 0; verify upstream.
        this.#activeThreadId = threadData.length && threadData[0].tid || null;
        this.#activeProcessId = threadData.length && threadData[0].pid || null;
        for (let i = 0; i < events.length; ++i) {
            while (j + 1 < threadData.length && threadData[j + 1].startTime <= events[i].ts) {
                this.#activeThreadId = threadData[++j].tid;
                this.#activeProcessId = threadData[j].pid;
            }
            this.#addTraceEvent(events[i], mainFrameId);
        }
        this.#activeThreadId = null;
        this.#activeProcessId = null;
    }
    #addTraceEvent(event, mainFrameId) {
        if (Types.TraceEvents.isTraceEventSetLayerId(event) && event.args.data.frame === mainFrameId) {
            this.#layerTreeId = event.args.data.layerTreeId;
        }
        else if (Types.TraceEvents.isTraceEventLayerTreeHostImplSnapshot(event) && Number(event.id) === this.#layerTreeId) {
            this.#handleLayerTreeSnapshot({
                entry: event,
                paints: [],
            });
        }
        else {
            if (isFrameEvent(event)) {
                this.#processCompositorEvents(event);
            }
            // Make sure we only use events from the main thread: we check the PID as
            // well in case two processes have a thread with the same TID.
            if (event.tid === this.#activeThreadId && event.pid === this.#activeProcessId) {
                this.#addMainThreadTraceEvent(event);
            }
        }
    }
    // Dispatch compositor-side events for the tracked layer tree.
    #processCompositorEvents(entry) {
        if (entry.args['layerTreeId'] !== this.#layerTreeId) {
            return;
        }
        if (Types.TraceEvents.isTraceEventBeginFrame(entry)) {
            this.#handleBeginFrame(entry.ts, entry.args['frameSeqId']);
        }
        else if (Types.TraceEvents.isTraceEventDrawFrame(entry)) {
            this.#handleDrawFrame(entry.ts, entry.args['frameSeqId']);
        }
        else if (Types.TraceEvents.isTraceEventActivateLayerTree(entry)) {
            this.#handleActivateLayerTree();
        }
        else if (Types.TraceEvents.isTraceEventRequestMainThreadFrame(entry)) {
            this.#handleRequestMainThreadFrame();
        }
        else if (Types.TraceEvents.isTraceEventNeedsBeginFrameChanged(entry)) {
            // needsBeginFrame property will either be 0 or 1, which represents
            // true/false in this case, hence the Boolean() wrapper.
            this.#handleNeedFrameChanged(entry.ts, entry.args['data'] && Boolean(entry.args['data']['needsBeginFrame']));
        }
        else if (Types.TraceEvents.isTraceEventDroppedFrame(entry)) {
            this.#handleDroppedFrame(entry.ts, entry.args['frameSeqId'], Boolean(entry.args['hasPartialUpdate']));
        }
    }
    // Accumulate main-thread work into the pending-commit frame.
    #addMainThreadTraceEvent(entry) {
        if (entryIsTopLevel(entry)) {
            this.#lastTaskBeginTime = entry.ts;
        }
        if (!this.#framePendingCommit && MAIN_FRAME_MARKERS.has(entry.name)) {
            this.#framePendingCommit = new PendingFrame(this.#lastTaskBeginTime || entry.ts);
        }
        if (!this.#framePendingCommit) {
            return;
        }
        if (Types.TraceEvents.isTraceEventBeginMainThreadFrame(entry) && entry.args.data.frameId) {
            this.#framePendingCommit.mainFrameId = entry.args.data.frameId;
        }
        if (Types.TraceEvents.isTraceEventPaint(entry)) {
            const snapshot = this.#layerTreeData.paintsToSnapshots.get(entry);
            if (snapshot) {
                this.#framePendingCommit.paints.push(new LayerPaintEvent(entry, snapshot));
            }
        }
        // Commit will be replacing CompositeLayers but CompositeLayers is kept
        // around for backwards compatibility.
        if ((Types.TraceEvents.isTraceEventCompositeLayers(entry) || Types.TraceEvents.isTraceEventCommit(entry)) &&
            entry.args['layerTreeId'] === this.#layerTreeId) {
            this.#handleCommit();
        }
    }
}
// Main-thread event names that can mark the start of work for a new frame.
const MAIN_FRAME_MARKERS = new Set([
    "ScheduleStyleRecalculation" /* Types.TraceEvents.KnownEventName.ScheduleStyleRecalculation */,
    "InvalidateLayout" /* Types.TraceEvents.KnownEventName.InvalidateLayout */,
    "BeginMainThreadFrame" /* Types.TraceEvents.KnownEventName.BeginMainThreadFrame */,
    "ScrollLayer" /* Types.TraceEvents.KnownEventName.ScrollLayer */,
]);

/** A single reconstructed compositor frame. */
export class TimelineFrame {
    startTime;
    startTimeOffset;
    endTime;
    duration;
    idle;
    dropped;
    isPartial;
    layerTree;
    paints;
    mainFrameId;
    seqId;
    constructor(seqId, startTime, startTimeOffset) {
        this.seqId = seqId;
        this.startTime = startTime;
        this.startTimeOffset = startTimeOffset;
        // A frame starts out as an empty interval; setEndTime() widens it.
        this.endTime = this.startTime;
        this.duration = Types.Timing.MicroSeconds(0);
        // Status flags default to a regular, fully-presented frame.
        this.idle = false;
        this.dropped = false;
        this.isPartial = false;
        this.layerTree = null;
        this.paints = [];
        this.mainFrameId = undefined;
    }
    setEndTime(endTime) {
        this.endTime = endTime;
        this.duration = Types.Timing.MicroSeconds(this.endTime - this.startTime);
    }
    setLayerTree(layerTree) {
        this.layerTree = layerTree;
    }
}

/** Pairs a Paint trace event with its layer snapshot. */
export class LayerPaintEvent {
    #event;
    #snapshot;
    constructor(event, snapshot) {
        this.#event = event;
        this.#snapshot = snapshot;
    }
    layerId() {
        return this.#event.args.data.layerId;
    }
    event() {
        return this.#event;
    }
    // Returns the layer rect plus serialized picture, or null when the
    // snapshot is missing either piece.
    picture() {
        const rect = this.#snapshot.args.snapshot.params?.layer_rect;
        const serialized = this.#snapshot.args.snapshot.skp64;
        if (!rect || !serialized) {
            return null;
        }
        return { rect: rect, serializedPicture: serialized };
    }
}

/** Main-thread work collected between a frame marker and its commit. */
export class PendingFrame {
    paints;
    mainFrameId;
    triggerTime;
    constructor(triggerTime) {
        this.triggerTime = triggerTime;
        this.paints = [];
        this.mainFrameId = undefined;
    }
}

// The parameters of an impl-side BeginFrame.
class BeginFrameInfo {
    seqId;
    startTime;
    isDropped;
    isPartial;
    constructor(seqId, startTime, isDropped, isPartial) {
        this.seqId = seqId;
        this.startTime = startTime;
        this.isDropped = isDropped;
        this.isPartial = isPartial;
    }
}

// A queue of BeginFrames pending visualization.
// BeginFrames are added into this queue as they occur; later when their
// corresponding DrawFrames occur (or lack thereof), the BeginFrames are removed
// from the queue and their timestamps are used for visualization.
export class TimelineFrameBeginFrameQueue {
    queueFrames = [];
    // Maps frameSeqId to BeginFrameInfo.
    mapFrames = {};
    // Add a BeginFrame to the queue, if it does not already exist.
    addFrameIfNotExists(seqId, startTime, isDropped, isPartial) {
        if (seqId in this.mapFrames) {
            return;
        }
        this.mapFrames[seqId] = new BeginFrameInfo(seqId, startTime, isDropped, isPartial);
        this.queueFrames.push(seqId);
    }
    // Mark a queued BeginFrame as dropped.
    setDropped(seqId, isDropped) {
        const frame = this.mapFrames[seqId];
        if (frame) {
            frame.isDropped = isDropped;
        }
    }
    setPartial(seqId, isPartial) {
        const frame = this.mapFrames[seqId];
        if (frame) {
            frame.isPartial = isPartial;
        }
    }
    processPendingBeginFramesOnDrawFrame(seqId) {
        const toVisualize = [];
        // Do not visualize this frame in the rare case where the current
        // DrawFrame does not have a corresponding BeginFrame.
        if (!(seqId in this.mapFrames)) {
            return toVisualize;
        }
        // Pop all BeginFrames ahead of the current one, keeping only the
        // dropped ones for visualization. Non-dropped frames popped here were
        // never drawn (but not considered dropped either); they need no
        // dedicated visualization and will read as continuations of others.
        while (this.queueFrames[0] !== seqId) {
            const headSeqId = this.queueFrames[0];
            if (this.mapFrames[headSeqId].isDropped) {
                toVisualize.push(this.mapFrames[headSeqId]);
            }
            delete this.mapFrames[headSeqId];
            this.queueFrames.shift();
        }
        // Finally pop the BeginFrame paired with the current DrawFrame.
        toVisualize.push(this.mapFrames[seqId]);
        delete this.mapFrames[seqId];
        this.queueFrames.shift();
        return toVisualize;
    }
}

/** Returns the frames overlapping [startTime, endTime]. */
export function framesWithinWindow(frames, startTime, endTime) {
    const first = Platform.ArrayUtilities.lowerBound(frames, startTime || 0, (time, frame) => time - frame.endTime);
    const last = Platform.ArrayUtilities.lowerBound(frames, endTime || Infinity, (time, frame) => time - frame.startTime);
    return frames.slice(first, last);
}
Events indicate the thread and process IDs, which are +// used to store the event in the correct process thread entry below. +const eventsInProcessThread = new Map(); +let mainGPUThreadTasks = []; +export function reset() { + eventsInProcessThread.clear(); + mainGPUThreadTasks = []; + handlerState = 1 /* HandlerState.UNINITIALIZED */; +} +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('GPU Handler was not reset before being initialized'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('GPU Handler is not initialized'); + } + if (!Types.TraceEvents.isTraceEventGPUTask(event)) { + return; + } + Helpers.Trace.addEventToProcessThread(event, eventsInProcessThread); +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('GPU Handler is not initialized'); + } + const { gpuProcessId, gpuThreadId } = metaHandlerData(); + const gpuThreadsForProcess = eventsInProcessThread.get(gpuProcessId); + if (gpuThreadsForProcess && gpuThreadId) { + mainGPUThreadTasks = gpuThreadsForProcess.get(gpuThreadId) || []; + } + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('GPU Handler is not finalized'); + } + return { + mainGPUThreadTasks, + }; +} +export function deps() { + return ['Meta']; +} +//# sourceMappingURL=GPUHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/ImagePaintingHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/ImagePaintingHandler.js new file mode 100644 index 000000000..15433f6d6 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/ImagePaintingHandler.js @@ -0,0 +1,108 @@ +// Copyright 2024 The Chromium Authors. 
// Track paintImageEvents across threads.
const paintImageEvents = new Map();
const decodeLazyPixelRefEvents = new Map();
// A DrawLazyPixelRef event will contain a numerical reference in
// args.LazyPixelRef. As we parse each DrawLazyPixelRef, we can assign it to a
// paint event. Later we want to look up paint events by this reference, so we
// store them in this map.
const paintImageByLazyPixelRef = new Map();
// When we find events that we want to tie to a particular PaintImage event,
// we add them to this map. These are currently only DecodeImage and
// ResizeImage events, but the type is deliberately generic as in the future we
// might want to add more events that relate to an individual PaintImage event.
const eventToPaintImage = new Map();

/** Clears every per-trace association map. */
export function reset() {
    paintImageEvents.clear();
    decodeLazyPixelRefEvents.clear();
    paintImageByLazyPixelRef.clear();
    eventToPaintImage.clear();
}

// Appends an event into a pid → tid → events[] nested map, creating the
// intermediate levels on first use.
function appendForProcessAndThread(store, event) {
    const forProcess = store.get(event.pid) || new Map();
    const forThread = forProcess.get(event.tid) || [];
    forThread.push(event);
    forProcess.set(event.tid, forThread);
    store.set(event.pid, forProcess);
}

/**
 * Associates DecodeImage events with the PaintImage they belong to, either
 * directly (latest PaintImage on the same thread) or indirectly via
 * DecodeLazyPixelRef/DrawLazyPixelRef and the LazyPixelRef key.
 */
export function handleEvent(event) {
    if (Types.TraceEvents.isTraceEventPaintImage(event)) {
        appendForProcessAndThread(paintImageEvents, event);
        return;
    }
    if (Types.TraceEvents.isTraceEventDecodeLazyPixelRef(event) && typeof event.args?.LazyPixelRef !== 'undefined') {
        // Store these because we use them to tie DecodeImage to a PaintEvent.
        appendForProcessAndThread(decodeLazyPixelRefEvents, event);
    }
    // A DrawLazyPixelRef ties its LazyPixelRef key to the latest PaintImage on
    // the same thread, so that a later DecodeLazyPixelRef with the same key can
    // resolve to that PaintImage.
    if (Types.TraceEvents.isTraceEventDrawLazyPixelRef(event) && typeof event.args?.LazyPixelRef !== 'undefined') {
        const lastPaintEvent = paintImageEvents.get(event.pid)?.get(event.tid)?.at(-1);
        if (lastPaintEvent) {
            paintImageByLazyPixelRef.set(event.args.LazyPixelRef, lastPaintEvent);
        }
        return;
    }
    if (!Types.TraceEvents.isTraceEventDecodeImage(event)) {
        return;
    }
    // Associate this DecodeImage with a PaintImage event, trying two routes:
    // 1. Prefer the latest PaintImage event on the same thread.
    const lastPaintImageEventOnThread = paintImageEvents.get(event.pid)?.get(event.tid)?.at(-1);
    if (lastPaintImageEventOnThread) {
        eventToPaintImage.set(event, lastPaintImageEventOnThread);
        return;
    }
    // 2. Otherwise take the latest DecodeLazyPixelRef on the thread and follow
    //    its LazyPixelRef key to the PaintImage recorded above.
    const lastDecodeLazyPixelRef = decodeLazyPixelRefEvents.get(event.pid)?.get(event.tid)?.at(-1);
    if (!lastDecodeLazyPixelRef || typeof lastDecodeLazyPixelRef.args?.LazyPixelRef === 'undefined') {
        return;
    }
    const paintEvent = paintImageByLazyPixelRef.get(lastDecodeLazyPixelRef.args.LazyPixelRef);
    if (paintEvent) {
        eventToPaintImage.set(event, paintEvent);
    }
}

export function data() {
    return {
        paintImageByDrawLazyPixelRef: paintImageByLazyPixelRef,
        paintImageForEvent: eventToPaintImage,
    };
}
+import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +const lastScheduleStyleRecalcByFrame = new Map(); +// This tracks the last event that is considered to have invalidated the layout +// for a given frame. +// Note that although there is an InvalidateLayout event, there are also other +// events (ScheduleStyleRecalculation) that could be the reason a layout was +// invalidated. +const lastInvalidationEventForFrame = new Map(); +// Important: although the event is called UpdateLayoutTree, in the UI we +// present these to the user as "Recalculate Style". So don't get confused! +// These are the same - just UpdateLayoutTree is what the event from Chromium +// is called. +const lastUpdateLayoutTreeByFrame = new Map(); +// This tracks postmessage dispatch and handler events for creating initiator association +const postMessageHandlerEvents = []; +const schedulePostMessageEventByTraceId = new Map(); +// These two maps store the same data but in different directions. +// For a given event, tell me what its initiator was. An event can only have one initiator. +const eventToInitiatorMap = new Map(); +// For a given event, tell me what events it initiated. An event can initiate +// multiple events, hence why the value for this map is an array. 
+const initiatorToEventsMap = new Map(); +const requestAnimationFrameEventsById = new Map(); +const timerInstallEventsById = new Map(); +const requestIdleCallbackEventsById = new Map(); +const webSocketCreateEventsById = new Map(); +export function reset() { + lastScheduleStyleRecalcByFrame.clear(); + lastInvalidationEventForFrame.clear(); + lastUpdateLayoutTreeByFrame.clear(); + timerInstallEventsById.clear(); + eventToInitiatorMap.clear(); + initiatorToEventsMap.clear(); + requestAnimationFrameEventsById.clear(); + requestIdleCallbackEventsById.clear(); + webSocketCreateEventsById.clear(); + schedulePostMessageEventByTraceId.clear(); + postMessageHandlerEvents.length = 0; + handlerState = 1 /* HandlerState.UNINITIALIZED */; +} +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('InitiatorsHandler was not reset before being initialized'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +function storeInitiator(data) { + eventToInitiatorMap.set(data.event, data.initiator); + const eventsForInitiator = initiatorToEventsMap.get(data.initiator) || []; + eventsForInitiator.push(data.event); + initiatorToEventsMap.set(data.initiator, eventsForInitiator); +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventScheduleStyleRecalculation(event)) { + lastScheduleStyleRecalcByFrame.set(event.args.data.frame, event); + } + else if (Types.TraceEvents.isTraceEventUpdateLayoutTree(event)) { + // IMPORTANT: although the trace event is called UpdateLayoutTree, this + // represents a Styles Recalculation. This event in the timeline is shown to + // the user as "Recalculate Styles." + if (event.args.beginData) { + // Store the last UpdateLayout event: we use this when we see an + // InvalidateLayout and try to figure out its initiator. 
+ lastUpdateLayoutTreeByFrame.set(event.args.beginData.frame, event); + // If this frame has seen a ScheduleStyleRecalc event, then that event is + // considered to be the initiator of this StylesRecalc. + const scheduledStyleForFrame = lastScheduleStyleRecalcByFrame.get(event.args.beginData.frame); + if (scheduledStyleForFrame) { + storeInitiator({ + event, + initiator: scheduledStyleForFrame, + }); + } + } + } + else if (Types.TraceEvents.isTraceEventInvalidateLayout(event)) { + // By default, the InvalidateLayout event is what triggered the layout invalidation for this frame. + let invalidationInitiator = event; + // However, if we have not had any prior invalidations for this frame, we + // want to consider StyleRecalculation events as they might be the actual + // cause of this layout invalidation. + if (!lastInvalidationEventForFrame.has(event.args.data.frame)) { + // 1. If we have not had an invalidation event for this frame + // 2. AND we have had an UpdateLayoutTree for this frame + // 3. AND the UpdateLayoutTree event ended AFTER the InvalidateLayout startTime + // 4. AND we have an initiator for the UpdateLayoutTree event + // 5. Then we set the last invalidation event for this frame to be the UpdateLayoutTree's initiator. + const lastUpdateLayoutTreeForFrame = lastUpdateLayoutTreeByFrame.get(event.args.data.frame); + if (lastUpdateLayoutTreeForFrame) { + const { endTime } = Helpers.Timing.eventTimingsMicroSeconds(lastUpdateLayoutTreeForFrame); + const initiatorOfUpdateLayout = eventToInitiatorMap.get(lastUpdateLayoutTreeForFrame); + if (initiatorOfUpdateLayout && endTime && endTime > event.ts) { + invalidationInitiator = initiatorOfUpdateLayout; + } + } + } + lastInvalidationEventForFrame.set(event.args.data.frame, invalidationInitiator); + } + else if (Types.TraceEvents.isTraceEventLayout(event)) { + // The initiator of a Layout event is the last Invalidation event. 
+ const lastInvalidation = lastInvalidationEventForFrame.get(event.args.beginData.frame); + if (lastInvalidation) { + storeInitiator({ + event, + initiator: lastInvalidation, + }); + } + // Now clear the last invalidation for the frame: the last invalidation has been linked to a Layout event, so it cannot be the initiator for any future layouts. + lastInvalidationEventForFrame.delete(event.args.beginData.frame); + } + else if (Types.TraceEvents.isTraceEventRequestAnimationFrame(event)) { + requestAnimationFrameEventsById.set(event.args.data.id, event); + } + else if (Types.TraceEvents.isTraceEventFireAnimationFrame(event)) { + // If we get a fire event, that means we should have had the + // RequestAnimationFrame event by now. If so, we can set that as the + // initiator for the fire event. + const matchingRequestEvent = requestAnimationFrameEventsById.get(event.args.data.id); + if (matchingRequestEvent) { + storeInitiator({ + event, + initiator: matchingRequestEvent, + }); + } + } + else if (Types.TraceEvents.isTraceEventTimerInstall(event)) { + timerInstallEventsById.set(event.args.data.timerId, event); + } + else if (Types.TraceEvents.isTraceEventTimerFire(event)) { + const matchingInstall = timerInstallEventsById.get(event.args.data.timerId); + if (matchingInstall) { + storeInitiator({ event, initiator: matchingInstall }); + } + } + else if (Types.TraceEvents.isTraceEventRequestIdleCallback(event)) { + requestIdleCallbackEventsById.set(event.args.data.id, event); + } + else if (Types.TraceEvents.isTraceEventFireIdleCallback(event)) { + const matchingRequestEvent = requestIdleCallbackEventsById.get(event.args.data.id); + if (matchingRequestEvent) { + storeInitiator({ + event, + initiator: matchingRequestEvent, + }); + } + } + else if (Types.TraceEvents.isTraceEventWebSocketCreate(event)) { + webSocketCreateEventsById.set(event.args.data.identifier, event); + } + else if (Types.TraceEvents.isTraceEventWebSocketSendHandshakeRequest(event)) { + const 
matchingCreateEvent = webSocketCreateEventsById.get(event.args.data.identifier); + if (matchingCreateEvent) { + storeInitiator({ + event, + initiator: matchingCreateEvent, + }); + } + } + else if (Types.TraceEvents.isTraceEventWebSocketSendHandshakeRequest(event) || + Types.TraceEvents.isTraceEventWebSocketReceiveHandshakeResponse(event) || + Types.TraceEvents.isTraceEventWebSocketDestroy(event)) { + const matchingCreateEvent = webSocketCreateEventsById.get(event.args.data.identifier); + if (matchingCreateEvent) { + storeInitiator({ + event, + initiator: matchingCreateEvent, + }); + } + } + // Store schedulePostMessage Events by their traceIds. + // so they can be reconciled later with matching handlePostMessage events with same traceIds. + else if (Types.TraceEvents.isTraceEventHandlePostMessage(event)) { + postMessageHandlerEvents.push(event); + } + else if (Types.TraceEvents.isTraceEventSchedulePostMessage(event)) { + const traceId = event.args.data?.traceId; + if (traceId) { + schedulePostMessageEventByTraceId.set(traceId, event); + } + } +} +function finalizeInitiatorRelationship() { + for (const handlerEvent of postMessageHandlerEvents) { + const traceId = handlerEvent.args.data?.traceId; + const matchingSchedulePostMesssageEvent = schedulePostMessageEventByTraceId.get(traceId); + if (matchingSchedulePostMesssageEvent) { + // Set schedulePostMesssage events as initiators for handler events. 
+ storeInitiator({ event: handlerEvent, initiator: matchingSchedulePostMesssageEvent }); + } + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('InitiatorsHandler is not initialized'); + } + // During event processing, we may encounter initiators before the handler events themselves + // (e.g dispatch events on worker and handler events on the main thread) + // we don't want to miss out on events whose initiators haven't been processed yet + finalizeInitiatorRelationship(); + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + return { + eventToInitiator: eventToInitiatorMap, + initiatorToEvents: initiatorToEventsMap, + }; +} +//# sourceMappingURL=InitiatorsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/InvalidationsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/InvalidationsHandler.js new file mode 100644 index 000000000..4b09f6454 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/InvalidationsHandler.js @@ -0,0 +1,120 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +const invalidationsForEvent = new Map(); +let lastRecalcStyleEvent = null; +// Used to track paints so we track invalidations correctly per paint. 
+let hasPainted = false; +const allInvalidationTrackingEvents = []; +export function reset() { + handlerState = 1 /* HandlerState.UNINITIALIZED */; + invalidationsForEvent.clear(); + lastRecalcStyleEvent = null; + allInvalidationTrackingEvents.length = 0; + hasPainted = false; +} +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('InvalidationsHandler was not reset before being initialized'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +function addInvalidationToEvent(event, invalidation) { + const existingInvalidations = invalidationsForEvent.get(event) || []; + const syntheticInvalidation = { + ...invalidation, + name: 'SyntheticInvalidation', + frame: invalidation.args.data.frame, + nodeId: invalidation.args.data.nodeId, + rawEvent: invalidation, + }; + if (invalidation.args.data.nodeName) { + syntheticInvalidation.nodeName = invalidation.args.data.nodeName; + } + if (invalidation.args.data.reason) { + syntheticInvalidation.reason = invalidation.args.data.reason; + } + if (invalidation.args.data.stackTrace) { + syntheticInvalidation.stackTrace = invalidation.args.data.stackTrace; + } + existingInvalidations.push(syntheticInvalidation); + invalidationsForEvent.set(event, existingInvalidations); +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventUpdateLayoutTree(event)) { + lastRecalcStyleEvent = event; + // Associate any prior invalidations with this recalc event. + for (const invalidation of allInvalidationTrackingEvents) { + if (Types.TraceEvents.isTraceEventLayoutInvalidationTracking(invalidation)) { + // LayoutInvalidation events cannot be associated with a LayoutTree + // event. 
+ continue; + } + const recalcFrameId = lastRecalcStyleEvent.args.beginData?.frame; + if (recalcFrameId && invalidation.args.data.frame === recalcFrameId) { + addInvalidationToEvent(event, invalidation); + } + } + return; + } + if (Types.TraceEvents.isTraceEventScheduleStyleInvalidationTracking(event) || + Types.TraceEvents.isTraceEventStyleRecalcInvalidationTracking(event) || + Types.TraceEvents.isTraceEventStyleInvalidatorInvalidationTracking(event) || + Types.TraceEvents.isTraceEventLayoutInvalidationTracking(event)) { + if (hasPainted) { + // If we have painted, then we can clear out the list of all existing + // invalidations, as we cannot associate them across frames. + allInvalidationTrackingEvents.length = 0; + lastRecalcStyleEvent = null; + hasPainted = false; + } + // Style invalidation events can occur before and during recalc styles. When we get a recalc style event (aka TraceEventUpdateLayoutTree), we check and associate any prior invalidations with it. + // But any invalidations that occur during a TraceEventUpdateLayoutTree + // event would be reported in trace events after. So each time we get an + // invalidation that might be due to a style recalc, we check if the + // timings overlap and if so associate them. + if (lastRecalcStyleEvent && + (Types.TraceEvents.isTraceEventScheduleStyleInvalidationTracking(event) || + Types.TraceEvents.isTraceEventStyleRecalcInvalidationTracking(event) || + Types.TraceEvents.isTraceEventStyleInvalidatorInvalidationTracking(event))) { + const recalcEndTime = lastRecalcStyleEvent.ts + (lastRecalcStyleEvent.dur || 0); + if (event.ts >= lastRecalcStyleEvent.ts && event.ts <= recalcEndTime && + lastRecalcStyleEvent.args.beginData?.frame === event.args.data.frame) { + addInvalidationToEvent(lastRecalcStyleEvent, event); + } + } + allInvalidationTrackingEvents.push(event); + return; + } + if (Types.TraceEvents.isTraceEventPaint(event)) { + // Used to ensure that we do not create relationships across frames. 
+ hasPainted = true; + return; + } + if (Types.TraceEvents.isTraceEventLayout(event)) { + const layoutFrame = event.args.beginData.frame; + for (const invalidation of allInvalidationTrackingEvents) { + // The only invalidations that cause a Layout are LayoutInvalidations :) + if (!Types.TraceEvents.isTraceEventLayoutInvalidationTracking(invalidation)) { + continue; + } + if (invalidation.args.data.frame === layoutFrame) { + addInvalidationToEvent(event, invalidation); + } + } + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('InvalidationsHandler is not initialized'); + } + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + return { + invalidationsForEvent, + }; +} +//# sourceMappingURL=InvalidationsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/LargestImagePaintHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/LargestImagePaintHandler.js new file mode 100644 index 000000000..2d2662fe7 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/LargestImagePaintHandler.js @@ -0,0 +1,38 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +/** + * If the LCP resource was an image, and that image was fetched over the + * network, we want to be able to find the network request in order to construct + * the critical path for an LCP image. + * Within the trace file there are `LargestImagePaint::Candidate` events. + * Within their data object, they contain a `DOMNodeId` property, which maps to + * the DOM Node ID for that image. + * + * This id maps exactly to the `data.nodeId` property that a + * `LargestContentfulPaint::Candidate` will have. 
So, when we find an image + * paint candidate, we can store it, keying it on the node ID. + * Then, when it comes to finding the network request for an LCP image, we can + * + * use the nodeId from the LCP candidate to find the image candidate. That image + * candidate also contains a `imageUrl` property, which will have the full URL + * to the image. + **/ +const imageByDOMNodeId = new Map(); +export function reset() { + imageByDOMNodeId.clear(); +} +export function handleEvent(event) { + if (!Types.TraceEvents.isTraceEventLargestImagePaintCandidate(event)) { + return; + } + if (!event.args.data) { + return; + } + imageByDOMNodeId.set(event.args.data.DOMNodeId, event); +} +export function data() { + return imageByDOMNodeId; +} +//# sourceMappingURL=LargestImagePaintHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/LargestTextPaintHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/LargestTextPaintHandler.js new file mode 100644 index 000000000..19b09be15 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/LargestTextPaintHandler.js @@ -0,0 +1,26 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +/** + * A trace file will contain all the text paints that were candidates for the + * LargestTextPaint. If an LCP event is text, it will point to one of these + * candidates, so we store them by their DOM Node ID. 
+ **/ +const textPaintByDOMNodeId = new Map(); +export function reset() { + textPaintByDOMNodeId.clear(); +} +export function handleEvent(event) { + if (!Types.TraceEvents.isTraceEventLargestTextPaintCandidate(event)) { + return; + } + if (!event.args.data) { + return; + } + textPaintByDOMNodeId.set(event.args.data.DOMNodeId, event); +} +export function data() { + return textPaintByDOMNodeId; +} +//# sourceMappingURL=LargestTextPaintHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/LayerTreeHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/LayerTreeHandler.js new file mode 100644 index 000000000..721eea980 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/LayerTreeHandler.js @@ -0,0 +1,116 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as metaHandlerData } from './MetaHandler.js'; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +const paintEvents = []; +const snapshotEvents = []; +const paintToSnapshotMap = new Map(); +let lastPaintForLayerId = {}; +let currentMainFrameLayerTreeId = null; +const updateLayerEvents = []; +const relevantEvents = []; +export function reset() { + handlerState = 1 /* HandlerState.UNINITIALIZED */; + paintEvents.length = 0; + snapshotEvents.length = 0; + paintToSnapshotMap.clear(); + lastPaintForLayerId = {}; + currentMainFrameLayerTreeId = null; + updateLayerEvents.length = 0; + relevantEvents.length = 0; +} +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('LayerTree Handler was not reset before being initialized'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function handleEvent(event) { + // We gather 
up the events here but do all the processing in finalize(). This + // is because we need to have all the events before we process them, and we + // need the Meta handler to be finalized() so we can use its data as we need + // the mainFrameId to know which Layer(s) to care about. + if (Types.TraceEvents.isTraceEventPaint(event) || Types.TraceEvents.isTraceEventDisplayListItemListSnapshot(event) || + Types.TraceEvents.isTraceEventUpdateLayer(event) || Types.TraceEvents.isTraceEventSetLayerId(event)) { + relevantEvents.push(event); + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('LayerTree Handler is not initialized'); + } + const metaData = metaHandlerData(); + Helpers.Trace.sortTraceEventsInPlace(relevantEvents); + for (const event of relevantEvents) { + if (Types.TraceEvents.isTraceEventSetLayerId(event)) { + if (metaData.mainFrameId !== event.args.data.frame) { + // We only care about LayerId changes that affect the main frame. + continue; + } + currentMainFrameLayerTreeId = event.args.data.layerTreeId; + } + else if (Types.TraceEvents.isTraceEventUpdateLayer(event)) { + // We don't do anything with this event, but we need to store it because + // the information in it determines if we need to care about future + // snapshot events - we need to know what the active layer is when we see a + // snapshot. + updateLayerEvents.push(event); + } + else if (Types.TraceEvents.isTraceEventPaint(event)) { + if (!event.args.data.layerId) { + // Note that this check purposefully includes excluding an event with a layerId of 0. + // 0 indicates that this paint was for a subframe - we do not want these + // as we only care about paints for top level frames. 
+ continue; + } + paintEvents.push(event); + lastPaintForLayerId[event.args.data.layerId] = event; + continue; + } + else if (Types.TraceEvents.isTraceEventDisplayListItemListSnapshot(event)) { + // First we figure out which layer is active for this event's thread. To + // do this we work backwards through the list of UpdateLayerEvents, + // finding the first one (i.e. the most recent one) with the same pid and + // tid. + let lastUpdateLayerEventForThread = null; + for (let i = updateLayerEvents.length - 1; i > -1; i--) { + const updateEvent = updateLayerEvents[i]; + if (updateEvent.pid === event.pid && updateEvent.tid === event.tid) { + lastUpdateLayerEventForThread = updateEvent; + break; + } + } + if (!lastUpdateLayerEventForThread) { + // No active layer, so this snapshot is not relevant. + continue; + } + if (lastUpdateLayerEventForThread.args.layerTreeId !== currentMainFrameLayerTreeId) { + // Snapshot applies to a layer that is not the main frame, so discard. + continue; + } + const paintEvent = lastPaintForLayerId[lastUpdateLayerEventForThread.args.layerId]; + if (!paintEvent) { + // No paint event for this layer, so discard. + continue; + } + snapshotEvents.push(event); + // Store the relationship between the paint and the snapshot. 
+ paintToSnapshotMap.set(paintEvent, event); + } + } + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + return { + paints: paintEvents, + snapshots: snapshotEvents, + paintsToSnapshots: paintToSnapshotMap, + }; +} +export function deps() { + return ['Meta']; +} +//# sourceMappingURL=LayerTreeHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/LayoutShiftsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/LayoutShiftsHandler.js new file mode 100644 index 000000000..ff79ec0ed --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/LayoutShiftsHandler.js @@ -0,0 +1,349 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as metaHandlerData } from './MetaHandler.js'; +// This represents the maximum #time we will allow a cluster to go before we +// reset it. +export const MAX_CLUSTER_DURATION = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(5000)); +// This represents the maximum #time we will allow between layout shift events +// before considering it to be the start of a new cluster. +export const MAX_SHIFT_TIME_DELTA = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(1000)); +// Layout shifts are reported globally to the developer, irrespective of which +// frame they originated in. However, each process does have its own individual +// CLS score, so we need to segment by process. This means Layout Shifts from +// sites with one process (no subframes, or subframes from the same origin) +// will be reported together. 
In the case of multiple renderers (frames across +// different origins), we offer the developer the ability to switch renderer in +// the UI. +const layoutShiftEvents = []; +// These events denote potential node resizings. We store them to link captured +// layout shifts to the resizing of unsized elements. +const layoutInvalidationEvents = []; +const scheduleStyleInvalidationEvents = []; +const styleRecalcInvalidationEvents = []; +const backendNodeIds = new Set(); +// Layout shifts happen during PrePaint as part of the rendering lifecycle. +// We determine if a LayoutInvalidation event is a potential root cause of a layout +// shift if the next PrePaint after the LayoutInvalidation is the parent +// node of such shift. +const prePaintEvents = []; +let sessionMaxScore = 0; +let clsWindowID = -1; +const clusters = []; +// The complete timeline of LS score changes in a trace. +// Includes drops to 0 when session windows end. +const scoreRecords = []; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('LayoutShifts Handler was not reset'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function reset() { + handlerState = 1 /* HandlerState.UNINITIALIZED */; + layoutShiftEvents.length = 0; + layoutInvalidationEvents.length = 0; + scheduleStyleInvalidationEvents.length = 0; + styleRecalcInvalidationEvents.length = 0; + prePaintEvents.length = 0; + backendNodeIds.clear(); + clusters.length = 0; + sessionMaxScore = 0; + scoreRecords.length = 0; + clsWindowID = -1; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Handler is not initialized'); + } + if (Types.TraceEvents.isTraceEventLayoutShift(event) && !event.args.data?.had_recent_input) { + layoutShiftEvents.push(event); + return; + } + if (Types.TraceEvents.isTraceEventLayoutInvalidationTracking(event)) { + 
layoutInvalidationEvents.push(event); + return; + } + if (Types.TraceEvents.isTraceEventScheduleStyleInvalidationTracking(event)) { + scheduleStyleInvalidationEvents.push(event); + } + if (Types.TraceEvents.isTraceEventStyleRecalcInvalidationTracking(event)) { + styleRecalcInvalidationEvents.push(event); + } + if (Types.TraceEvents.isTraceEventPrePaint(event)) { + prePaintEvents.push(event); + return; + } +} +function traceWindowFromTime(time) { + return { + min: time, + max: time, + range: Types.Timing.MicroSeconds(0), + }; +} +function updateTraceWindowMax(traceWindow, newMax) { + traceWindow.max = newMax; + traceWindow.range = Types.Timing.MicroSeconds(traceWindow.max - traceWindow.min); +} +function buildScoreRecords() { + const { traceBounds } = metaHandlerData(); + scoreRecords.push({ ts: traceBounds.min, score: 0 }); + for (const cluster of clusters) { + let clusterScore = 0; + if (cluster.events[0].args.data) { + scoreRecords.push({ ts: cluster.clusterWindow.min, score: cluster.events[0].args.data.weighted_score_delta }); + } + for (let i = 0; i < cluster.events.length; i++) { + const event = cluster.events[i]; + if (!event.args.data) { + continue; + } + clusterScore += event.args.data.weighted_score_delta; + scoreRecords.push({ ts: event.ts, score: clusterScore }); + } + scoreRecords.push({ ts: cluster.clusterWindow.max, score: 0 }); + } +} +/** + * Collects backend node ids coming from LayoutShift and LayoutInvalidation + * events. + */ +function collectNodes() { + backendNodeIds.clear(); + // Collect the node ids present in the shifts. + for (const layoutShift of layoutShiftEvents) { + if (!layoutShift.args.data?.impacted_nodes) { + continue; + } + for (const node of layoutShift.args.data.impacted_nodes) { + backendNodeIds.add(node.node_id); + } + } + // Collect the node ids present in LayoutInvalidation & scheduleStyleInvalidation events. 
+ for (const layoutInvalidation of layoutInvalidationEvents) { + if (!layoutInvalidation.args.data?.nodeId) { + continue; + } + backendNodeIds.add(layoutInvalidation.args.data.nodeId); + } + for (const scheduleStyleInvalidation of scheduleStyleInvalidationEvents) { + if (!scheduleStyleInvalidation.args.data?.nodeId) { + continue; + } + backendNodeIds.add(scheduleStyleInvalidation.args.data.nodeId); + } +} +export async function finalize() { + // Ensure the events are sorted by #time ascending. + layoutShiftEvents.sort((a, b) => a.ts - b.ts); + prePaintEvents.sort((a, b) => a.ts - b.ts); + layoutInvalidationEvents.sort((a, b) => a.ts - b.ts); + // Each function transforms the data used by the next, as such the invoke order + // is important. + await buildLayoutShiftsClusters(); + buildScoreRecords(); + collectNodes(); + handlerState = 3 /* HandlerState.FINALIZED */; +} +async function buildLayoutShiftsClusters() { + const { navigationsByFrameId, mainFrameId, traceBounds } = metaHandlerData(); + const navigations = navigationsByFrameId.get(mainFrameId) || []; + if (layoutShiftEvents.length === 0) { + return; + } + let firstShiftTime = layoutShiftEvents[0].ts; + let lastShiftTime = layoutShiftEvents[0].ts; + let lastShiftNavigation = null; + // Now step through each and create clusters. + // A cluster is equivalent to a session window (see https://web.dev/cls/#what-is-cls). + // To make the line chart clear, we explicitly demark the limits of each session window + // by starting the cumulative score of the window at the time of the first layout shift + // and ending it (dropping the line back to 0) when the window ends according to the + // thresholds (MAX_CLUSTER_DURATION, MAX_SHIFT_TIME_DELTA). + for (const event of layoutShiftEvents) { + // First detect if either the cluster duration or the #time between this and + // the last shift has been exceeded. 
+ const clusterDurationExceeded = event.ts - firstShiftTime > MAX_CLUSTER_DURATION; + const maxTimeDeltaSinceLastShiftExceeded = event.ts - lastShiftTime > MAX_SHIFT_TIME_DELTA; + // Next take a look at navigations. If between this and the last shift we have navigated, + // note it. + const currentShiftNavigation = Platform.ArrayUtilities.nearestIndexFromEnd(navigations, nav => nav.ts < event.ts); + const hasNavigated = lastShiftNavigation !== currentShiftNavigation && currentShiftNavigation !== null; + // If any of the above criteria are met or if we don't have any cluster yet we should + // start a new one. + if (clusterDurationExceeded || maxTimeDeltaSinceLastShiftExceeded || hasNavigated || !clusters.length) { + // The cluster starts #time should be the timestamp of the first layout shift in it. + const clusterStartTime = event.ts; + // If the last session window ended because the max delta time between shifts + // was exceeded set the endtime to MAX_SHIFT_TIME_DELTA microseconds after the + // last shift in the session. + const endTimeByMaxSessionDuration = clusterDurationExceeded ? firstShiftTime + MAX_CLUSTER_DURATION : Infinity; + // If the last session window ended because the max session duration was + // surpassed, set the endtime so that the window length = MAX_CLUSTER_DURATION; + const endTimeByMaxShiftGap = maxTimeDeltaSinceLastShiftExceeded ? lastShiftTime + MAX_SHIFT_TIME_DELTA : Infinity; + // If there was a navigation during the last window, close it at the time + // of the navigation. + const endTimeByNavigation = hasNavigated ? navigations[currentShiftNavigation].ts : Infinity; + // End the previous cluster at the time of the first of the criteria above that was met. + const previousClusterEndTime = Math.min(endTimeByMaxSessionDuration, endTimeByMaxShiftGap, endTimeByNavigation); + // If there is an existing cluster update its closing time. 
+ if (clusters.length > 0) { + const currentCluster = clusters[clusters.length - 1]; + updateTraceWindowMax(currentCluster.clusterWindow, Types.Timing.MicroSeconds(previousClusterEndTime)); + } + clusters.push({ + events: [], + clusterWindow: traceWindowFromTime(clusterStartTime), + clusterCumulativeScore: 0, + scoreWindows: { + good: traceWindowFromTime(clusterStartTime), + needsImprovement: null, + bad: null, + }, + }); + firstShiftTime = clusterStartTime; + } + // Given the above we should have a cluster available, so pick the most + // recent one and append the shift, bump its score and window values accordingly. + const currentCluster = clusters[clusters.length - 1]; + const timeFromNavigation = currentShiftNavigation !== null ? + Types.Timing.MicroSeconds(event.ts - navigations[currentShiftNavigation].ts) : + undefined; + currentCluster.clusterCumulativeScore += event.args.data ? event.args.data.weighted_score_delta : 0; + if (!event.args.data) { + continue; + } + const shift = { + rawSourceEvent: event, + ...event, + args: { + frame: event.args.frame, + data: { + ...event.args.data, + rawEvent: event, + }, + }, + parsedData: { + timeFromNavigation, + cumulativeWeightedScoreInWindow: currentCluster.clusterCumulativeScore, + // The score of the session window is temporarily set to 0 just + // to initialize it. Since we need to get the score of all shifts + // in the session window to determine its value, its definite + // value is set when stepping through the built clusters. + sessionWindowData: { cumulativeWindowScore: 0, id: clusters.length }, + }, + }; + currentCluster.events.push(shift); + updateTraceWindowMax(currentCluster.clusterWindow, event.ts); + lastShiftTime = event.ts; + lastShiftNavigation = currentShiftNavigation; + } + // Now step through each cluster and set up the times at which the value + // goes from Good, to needs improvement, to Bad. 
Note that if there is a + // large jump we may go from Good to Bad without ever creating a Needs + // Improvement window at all. + for (const cluster of clusters) { + let weightedScore = 0; + let windowID = -1; + // If this is the last cluster update its window. The cluster duration is determined + // by the minimum between: time to next navigation, trace end time, time to maximum + // cluster duration and time to maximum gap between layout shifts. + if (cluster === clusters[clusters.length - 1]) { + const clusterEndByMaxDuration = MAX_CLUSTER_DURATION + cluster.clusterWindow.min; + const clusterEndByMaxGap = cluster.clusterWindow.max + MAX_SHIFT_TIME_DELTA; + const nextNavigationIndex = Platform.ArrayUtilities.nearestIndexFromBeginning(navigations, nav => nav.ts > cluster.clusterWindow.max); + const nextNavigationTime = nextNavigationIndex ? navigations[nextNavigationIndex].ts : Infinity; + const clusterEnd = Math.min(clusterEndByMaxDuration, clusterEndByMaxGap, traceBounds.max, nextNavigationTime); + updateTraceWindowMax(cluster.clusterWindow, Types.Timing.MicroSeconds(clusterEnd)); + } + for (const shift of cluster.events) { + weightedScore += shift.args.data ? shift.args.data.weighted_score_delta : 0; + windowID = shift.parsedData.sessionWindowData.id; + const ts = shift.ts; + // Update the the CLS score of this shift's session window now that + // we have it. + shift.parsedData.sessionWindowData.cumulativeWindowScore = cluster.clusterCumulativeScore; + if (weightedScore < 0.1 /* LayoutShiftsThreshold.NEEDS_IMPROVEMENT */) { + // Expand the Good window. + updateTraceWindowMax(cluster.scoreWindows.good, ts); + } + else if (weightedScore >= 0.1 /* LayoutShiftsThreshold.NEEDS_IMPROVEMENT */ && weightedScore < 0.25 /* LayoutShiftsThreshold.BAD */) { + if (!cluster.scoreWindows.needsImprovement) { + // Close the Good window, and open the needs improvement window. 
+ updateTraceWindowMax(cluster.scoreWindows.good, Types.Timing.MicroSeconds(ts - 1)); + cluster.scoreWindows.needsImprovement = traceWindowFromTime(ts); + } + // Expand the needs improvement window. + updateTraceWindowMax(cluster.scoreWindows.needsImprovement, ts); + } + else if (weightedScore >= 0.25 /* LayoutShiftsThreshold.BAD */) { + if (!cluster.scoreWindows.bad) { + // We may jump from Good to Bad here, so update whichever window is open. + if (cluster.scoreWindows.needsImprovement) { + updateTraceWindowMax(cluster.scoreWindows.needsImprovement, Types.Timing.MicroSeconds(ts - 1)); + } + else { + updateTraceWindowMax(cluster.scoreWindows.good, Types.Timing.MicroSeconds(ts - 1)); + } + cluster.scoreWindows.bad = traceWindowFromTime(shift.ts); + } + // Expand the Bad window. + updateTraceWindowMax(cluster.scoreWindows.bad, ts); + } + // At this point the windows are set by the timestamps of the events, but the + // next cluster begins at the timestamp of its first event. As such we now + // need to expand the score window to the end of the cluster, and we do so + // by using the Bad widow if it's there, or the NI window, or finally the + // Good window. 
+ if (cluster.scoreWindows.bad) { + updateTraceWindowMax(cluster.scoreWindows.bad, cluster.clusterWindow.max); + } + else if (cluster.scoreWindows.needsImprovement) { + updateTraceWindowMax(cluster.scoreWindows.needsImprovement, cluster.clusterWindow.max); + } + else { + updateTraceWindowMax(cluster.scoreWindows.good, cluster.clusterWindow.max); + } + } + if (weightedScore > sessionMaxScore) { + clsWindowID = windowID; + sessionMaxScore = weightedScore; + } + } +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Layout Shifts Handler is not finalized'); + } + return { + clusters, + sessionMaxScore: sessionMaxScore, + clsWindowID, + prePaintEvents, + layoutInvalidationEvents, + scheduleStyleInvalidationEvents, + styleRecalcInvalidationEvents: [], + scoreRecords, + // TODO(crbug/41484172): change the type so no need to clone + backendNodeIds: [...backendNodeIds], + }; +} +export function deps() { + return ['Screenshots', 'Meta']; +} +export function stateForLayoutShiftScore(score) { + let state = "good" /* ScoreClassification.GOOD */; + if (score >= 0.1 /* LayoutShiftsThreshold.NEEDS_IMPROVEMENT */) { + state = "ok" /* ScoreClassification.OK */; + } + if (score >= 0.25 /* LayoutShiftsThreshold.BAD */) { + state = "bad" /* ScoreClassification.BAD */; + } + return state; +} +//# sourceMappingURL=LayoutShiftsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/MemoryHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/MemoryHandler.js new file mode 100644 index 000000000..8ffd722e6 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/MemoryHandler.js @@ -0,0 +1,20 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Platform from '../../../core/platform/platform.js'; +import * as Types from '../types/types.js'; +const updateCountersByProcess = new Map(); +export function reset() { + updateCountersByProcess.clear(); +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventUpdateCounters(event)) { + const countersForProcess = Platform.MapUtilities.getWithDefault(updateCountersByProcess, event.pid, () => []); + countersForProcess.push(event); + updateCountersByProcess.set(event.pid, countersForProcess); + } +} +export function data() { + return { updateCountersByProcess }; +} +//# sourceMappingURL=MemoryHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/MetaHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/MetaHandler.js new file mode 100644 index 000000000..a312ebcc4 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/MetaHandler.js @@ -0,0 +1,359 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +// We track the renderer processes we see in each frame on the way through the trace. +const rendererProcessesByFrameId = new Map(); +// We will often want to key data by Frame IDs, and commonly we'll care most +// about the main frame's ID, so we store and expose that. +let mainFrameId = ''; +let mainFrameURL = ''; +const framesByProcessId = new Map(); +// We will often want to key data by the browser process, GPU process and top +// level renderer IDs, so keep a track on those. 
+let browserProcessId = Types.TraceEvents.ProcessID(-1); +let browserThreadId = Types.TraceEvents.ThreadID(-1); +let gpuProcessId = Types.TraceEvents.ProcessID(-1); +let gpuThreadId = Types.TraceEvents.ThreadID(-1); +let viewportRect = null; +const processNames = new Map(); +const topLevelRendererIds = new Set(); +const traceBounds = { + min: Types.Timing.MicroSeconds(Number.POSITIVE_INFINITY), + max: Types.Timing.MicroSeconds(Number.NEGATIVE_INFINITY), + range: Types.Timing.MicroSeconds(Number.POSITIVE_INFINITY), +}; +/** + * These represent the user navigating. Values such as First Contentful Paint, + * etc, are relative to the navigation. + * + * We store navigation events both by the frame and navigation ID. This means + * when we need to look them up, we can use whichever ID we have. + * + * Note that these Maps will have the same values in them; these are just keyed + * differently to make look-ups easier. + * + * We also additionally maintain an array of only navigations that occured on + * the main frame. In many places in the UI we only care about highlighting + * main frame navigations, so calculating this list here is better than + * filtering either of the below maps over and over again at the UI layer. + */ +const navigationsByFrameId = new Map(); +const navigationsByNavigationId = new Map(); +const mainFrameNavigations = []; +// Represents all the threads in the trace, organized by process. This is mostly for internal +// bookkeeping so that during the finalize pass we can obtain the main and browser thread IDs. 
+const threadsInProcess = new Map(); +let traceStartedTimeFromTracingStartedEvent = Types.Timing.MicroSeconds(-1); +const eventPhasesOfInterestForTraceBounds = new Set([ + "B" /* Types.TraceEvents.Phase.BEGIN */, + "E" /* Types.TraceEvents.Phase.END */, + "X" /* Types.TraceEvents.Phase.COMPLETE */, + "I" /* Types.TraceEvents.Phase.INSTANT */, +]); +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +// Tracks if the trace is a generic trace, which here means that it did not come from athe DevTools Performance Panel recording. +// We assume a trace is generic, and mark it as not generic if we see any of: +// - TracingStartedInPage +// - TracingStartedInBrowser +// - TracingSessionIdForWorker +// These are all events which indicate this is a Chrome browser trace. +let traceIsGeneric = true; +const CHROME_WEB_TRACE_EVENTS = new Set([ + "TracingStartedInPage" /* Types.TraceEvents.KnownEventName.TracingStartedInPage */, + "TracingSessionIdForWorker" /* Types.TraceEvents.KnownEventName.TracingSessionIdForWorker */, + "TracingStartedInBrowser" /* Types.TraceEvents.KnownEventName.TracingStartedInBrowser */, +]); +export function reset() { + navigationsByFrameId.clear(); + navigationsByNavigationId.clear(); + processNames.clear(); + mainFrameNavigations.length = 0; + browserProcessId = Types.TraceEvents.ProcessID(-1); + browserThreadId = Types.TraceEvents.ThreadID(-1); + gpuProcessId = Types.TraceEvents.ProcessID(-1); + gpuThreadId = Types.TraceEvents.ThreadID(-1); + viewportRect = null; + topLevelRendererIds.clear(); + threadsInProcess.clear(); + rendererProcessesByFrameId.clear(); + framesByProcessId.clear(); + traceBounds.min = Types.Timing.MicroSeconds(Number.POSITIVE_INFINITY); + traceBounds.max = Types.Timing.MicroSeconds(Number.NEGATIVE_INFINITY); + traceBounds.range = Types.Timing.MicroSeconds(Number.POSITIVE_INFINITY); + traceStartedTimeFromTracingStartedEvent = Types.Timing.MicroSeconds(-1); + traceIsGeneric = true; + handlerState = 1 /* 
HandlerState.UNINITIALIZED */; +} +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('Meta Handler was not reset'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +function updateRendererProcessByFrame(event, frame) { + const framesInProcessById = Platform.MapUtilities.getWithDefault(framesByProcessId, frame.processId, () => new Map()); + framesInProcessById.set(frame.frame, frame); + const rendererProcessInFrame = Platform.MapUtilities.getWithDefault(rendererProcessesByFrameId, frame.frame, () => new Map()); + const rendererProcessInfo = Platform.MapUtilities.getWithDefault(rendererProcessInFrame, frame.processId, () => { + return []; + }); + const lastProcessData = rendererProcessInfo.at(-1); + // Only store a new entry if the URL changed, otherwise it's just + // redundant information. + if (lastProcessData && lastProcessData.frame.url === frame.url) { + return; + } + // For now we store the time of the event as the min. In the finalize we step + // through each of these windows and update their max and range values. + rendererProcessInfo.push({ + frame, + window: { + min: event.ts, + max: Types.Timing.MicroSeconds(0), + range: Types.Timing.MicroSeconds(0), + }, + }); +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Meta Handler is not initialized'); + } + if (traceIsGeneric && CHROME_WEB_TRACE_EVENTS.has(event.name)) { + traceIsGeneric = false; + } + if (Types.TraceEvents.isProcessName(event)) { + processNames.set(event.pid, event); + } + // If there is a timestamp (which meta events do not have), and the event does + // not end with ::UMA then it, and the event is in the set of valid phases, + // then it should be included for the purposes of calculating the trace bounds. + // The UMA events in particular seem to be reported on page unloading, which + // often extends the bounds of the trace unhelpfully. 
+ if (event.ts !== 0 && !event.name.endsWith('::UMA') && eventPhasesOfInterestForTraceBounds.has(event.ph)) { + traceBounds.min = Types.Timing.MicroSeconds(Math.min(event.ts, traceBounds.min)); + const eventDuration = event.dur || Types.Timing.MicroSeconds(0); + traceBounds.max = Types.Timing.MicroSeconds(Math.max(event.ts + eventDuration, traceBounds.max)); + } + if (Types.TraceEvents.isProcessName(event) && + (event.args.name === 'Browser' || event.args.name === 'HeadlessBrowser')) { + browserProcessId = event.pid; + return; + } + if (Types.TraceEvents.isProcessName(event) && (event.args.name === 'Gpu' || event.args.name === 'GPU Process')) { + gpuProcessId = event.pid; + return; + } + if (Types.TraceEvents.isThreadName(event) && event.args.name === 'CrGpuMain') { + gpuThreadId = event.tid; + return; + } + if (Types.TraceEvents.isThreadName(event) && event.args.name === 'CrBrowserMain') { + browserThreadId = event.tid; + } + if (Types.TraceEvents.isTraceEventMainFrameViewport(event) && viewportRect === null) { + const rectAsArray = event.args.data.viewport_rect; + const viewportX = rectAsArray[0]; + const viewportY = rectAsArray[1]; + const viewportWidth = rectAsArray[2]; + const viewportHeight = rectAsArray[5]; + viewportRect = new DOMRect(viewportX, viewportY, viewportWidth, viewportHeight); + } + // The TracingStartedInBrowser event includes the data on which frames are + // in scope at the start of the trace. We use this to identify the frame with + // no parent, i.e. the top level frame. + if (Types.TraceEvents.isTraceEventTracingStartedInBrowser(event)) { + traceStartedTimeFromTracingStartedEvent = event.ts; + if (!event.args.data) { + throw new Error('No frames found in trace data'); + } + for (const frame of (event.args.data.frames ?? [])) { + updateRendererProcessByFrame(event, frame); + if (!frame.parent) { + topLevelRendererIds.add(frame.processId); + } + // isOutermostMainFrame was added to trace events in April 2024 + // [crrev.com/c/5424783]. 
+ // If our trace has that, it is the most accurate way of determining the + // main frame, as only one frame will have it set to true. + const canUseIsOutermostToDetermineMainFrame = 'isOutermostMainFrame' in frame; + if (canUseIsOutermostToDetermineMainFrame) { + // We have a "new" trace with isOutermostMainFrame. Therefore we mark + // the frame as the main frame if and ONLY IF it has + // isOutermostMainFrame set to true. + if (frame.isOutermostMainFrame) { + mainFrameId = frame.frame; + mainFrameURL = frame.url; + } + } + else { + // We have an "old" trace without isOutermostMainFrame. + // We fallback to looking for frames without a parent, and that have a + // URL set. This is a crude but pretty reliable way to determine the + // main frame. + if (!frame.parent && frame.url) { + mainFrameId = frame.frame; + mainFrameURL = frame.url; + } + } + } + return; + } + // FrameCommittedInBrowser events tell us information about each frame + // and we use these to track how long each individual renderer is active + // for. We track all renderers here (top level and those in frames), but + // for convenience we also populate a set of top level renderer IDs. + if (Types.TraceEvents.isTraceEventFrameCommittedInBrowser(event)) { + const frame = event.args.data; + if (!frame) { + return; + } + updateRendererProcessByFrame(event, frame); + if (frame.parent) { + return; + } + topLevelRendererIds.add(frame.processId); + return; + } + if (Types.TraceEvents.isTraceEventCommitLoad(event)) { + const frameData = event.args.data; + if (!frameData) { + return; + } + const { frame, name, url } = frameData; + updateRendererProcessByFrame(event, { processId: event.pid, frame, name, url }); + return; + } + // Track all threads based on the process & thread IDs. + if (Types.TraceEvents.isThreadName(event)) { + const threads = Platform.MapUtilities.getWithDefault(threadsInProcess, event.pid, () => new Map()); + threads.set(event.tid, event); + return; + } + // Track all navigation events. 
Note that there can be navigation start events + // but where the documentLoaderURL is empty. As far as the trace rendering is + // concerned, these events are noise so we filter them out here. + // (The filtering of empty URLs is done in the + // isTraceEventNavigationStartWithURL check) + if (Types.TraceEvents.isTraceEventNavigationStartWithURL(event) && event.args.data) { + const navigationId = event.args.data.navigationId; + if (navigationsByNavigationId.has(navigationId)) { + // We have only ever seen this situation once, in crbug.com/1503982, where the user ran: + // window.location.href = 'javascript:console.log("foo")' + // In this situation two identical navigationStart events are emitted with the same data, URL and ID. + // So, in this situation we drop/ignore any subsequent navigations if we have already seen that ID. + return; + } + navigationsByNavigationId.set(navigationId, event); + const frameId = event.args.frame; + const existingFrameNavigations = navigationsByFrameId.get(frameId) || []; + existingFrameNavigations.push(event); + navigationsByFrameId.set(frameId, existingFrameNavigations); + if (frameId === mainFrameId) { + mainFrameNavigations.push(event); + } + return; + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Handler is not initialized'); + } + // We try to set the minimum time by finding the event with the smallest + // timestamp. However, if we also got a timestamp from the + // TracingStartedInBrowser event, we should always use that. + // But in some traces (for example, CPU profiles) we do not get that event, + // hence why we need to check we got a timestamp from it before setting it. 
+ if (traceStartedTimeFromTracingStartedEvent >= 0) { + traceBounds.min = traceStartedTimeFromTracingStartedEvent; + } + traceBounds.range = Types.Timing.MicroSeconds(traceBounds.max - traceBounds.min); + // If we go from foo.com to example.com we will get a new renderer, and + // therefore the "top level renderer" will have a different PID as it has + // changed. Here we step through each renderer process and updated its window + // bounds, such that we end up with the time ranges in the trace for when + // each particular renderer started and stopped being the main renderer + // process. + for (const [, processWindows] of rendererProcessesByFrameId) { + const processWindowValues = [...processWindows.values()].flat(); + for (let i = 0; i < processWindowValues.length; i++) { + const currentWindow = processWindowValues[i]; + const nextWindow = processWindowValues[i + 1]; + // For the last window we set its max to be positive infinity. + // TODO: Move the trace bounds handler into meta so we can clamp first and last windows. + if (!nextWindow) { + currentWindow.window.max = Types.Timing.MicroSeconds(traceBounds.max); + currentWindow.window.range = Types.Timing.MicroSeconds(traceBounds.max - currentWindow.window.min); + } + else { + currentWindow.window.max = Types.Timing.MicroSeconds(nextWindow.window.min - 1); + currentWindow.window.range = Types.Timing.MicroSeconds(currentWindow.window.max - currentWindow.window.min); + } + } + } + // Frame ids which we didn't register using either the TracingStartedInBrowser or + // the FrameCommittedInBrowser events are considered noise, so we filter them out, as well + // as the navigations that belong to such frames. + for (const [frameId, navigations] of navigationsByFrameId) { + // The frames in the rendererProcessesByFrameId map come only from the + // TracingStartedInBrowser and FrameCommittedInBrowser events, so we can use it as point + // of comparison to determine if a frameId should be discarded. 
+ if (rendererProcessesByFrameId.has(frameId)) { + continue; + } + navigationsByFrameId.delete(frameId); + for (const navigation of navigations) { + if (!navigation.args.data) { + continue; + } + navigationsByNavigationId.delete(navigation.args.data.navigationId); + } + } + // Sometimes in traces the TracingStartedInBrowser event can give us an + // incorrect initial URL for the main frame's URL - about:blank or the URL of + // the previous page. This doesn't matter too much except we often use this + // URL as the visual name of the trace shown to the user (e.g. in the history + // dropdown). We can be more accurate by finding the first main frame + // navigaton, and using its URL, if we have it. + // However, to avoid doing this in a case where the first navigation is far + // into the trace's lifecycle, we only do this in situations where the first + // navigation happened very soon (0.5 seconds) after the trace started + // recording. + const firstMainFrameNav = mainFrameNavigations.at(0); + const firstNavTimeThreshold = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(0.5)); + if (firstMainFrameNav) { + const navigationIsWithinThreshold = firstMainFrameNav.ts - traceBounds.min < firstNavTimeThreshold; + if (firstMainFrameNav.args.data?.isOutermostMainFrame && firstMainFrameNav.args.data?.documentLoaderURL && + navigationIsWithinThreshold) { + mainFrameURL = firstMainFrameNav.args.data.documentLoaderURL; + } + } + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Meta Handler is not finalized'); + } + return { + traceBounds: { ...traceBounds }, + browserProcessId, + browserThreadId, + processNames, + gpuProcessId, + gpuThreadId: gpuThreadId === Types.TraceEvents.ThreadID(-1) ? 
undefined : gpuThreadId, + viewportRect: viewportRect || undefined, + mainFrameId, + mainFrameURL, + navigationsByFrameId, + navigationsByNavigationId, + threadsInProcess, + rendererProcessesByFrame: rendererProcessesByFrameId, + topLevelRendererIds, + frameByProcessId: framesByProcessId, + mainFrameNavigations, + traceIsGeneric, + }; +} +//# sourceMappingURL=MetaHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/ModelHandlers.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/ModelHandlers.js new file mode 100644 index 000000000..1a6a4c87f --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/ModelHandlers.js @@ -0,0 +1,29 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export * as Animations from './AnimationHandler.js'; +export * as AuctionWorklets from './AuctionWorkletsHandler.js'; +export * as ExtensionTraceData from './ExtensionTraceDataHandler.js'; +export * as Frames from './FramesHandler.js'; +export * as GPU from './GPUHandler.js'; +export * as ImagePainting from './ImagePaintingHandler.js'; +export * as Initiators from './InitiatorsHandler.js'; +export * as Invalidations from './InvalidationsHandler.js'; +export * as LargestImagePaint from './LargestImagePaintHandler.js'; +export * as LargestTextPaint from './LargestTextPaintHandler.js'; +export * as LayerTree from './LayerTreeHandler.js'; +export * as LayoutShifts from './LayoutShiftsHandler.js'; +export * as Memory from './MemoryHandler.js'; +export * as Meta from './MetaHandler.js'; +export * as NetworkRequests from './NetworkRequestsHandler.js'; +export * as PageFrames from './PageFramesHandler.js'; +export * as PageLoadMetrics from './PageLoadMetricsHandler.js'; +export * as Renderer from './RendererHandler.js'; +export * as Samples from './SamplesHandler.js'; +export * as 
Screenshots from './ScreenshotsHandler.js'; +export * as SelectorStats from './SelectorStatsHandler.js'; +export * as UserInteractions from './UserInteractionsHandler.js'; +export * as UserTimings from './UserTimingsHandler.js'; +export * as Warnings from './WarningsHandler.js'; +export * as Workers from './WorkersHandler.js'; +//# sourceMappingURL=ModelHandlers.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/NetworkRequestsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/NetworkRequestsHandler.js new file mode 100644 index 000000000..33a118aea --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/NetworkRequestsHandler.js @@ -0,0 +1,368 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as metaHandlerData } from './MetaHandler.js'; +const MILLISECONDS_TO_MICROSECONDS = 1000; +const SECONDS_TO_MICROSECONDS = 1000000; +const requestMap = new Map(); +const requestsByOrigin = new Map(); +const requestsByTime = []; +function storeTraceEventWithRequestId(requestId, key, value) { + if (!requestMap.has(requestId)) { + requestMap.set(requestId, {}); + } + const traceEvents = requestMap.get(requestId); + if (!traceEvents) { + throw new Error(`Unable to locate trace events for request ID ${requestId}`); + } + if (Array.isArray(traceEvents[key])) { + const target = traceEvents[key]; + const values = value; + target.push(...values); + } + else { + traceEvents[key] = value; + } +} +function firstPositiveValueInList(entries) { + for (const entry of entries) { + if (entry > 0) { + return entry; + } + } + // In the event we don't find a positive value, we return 0 so as to + // be a 
mathematical noop. It's typically not correct to return – say – + // a -1 here because it would affect the calculation of stats below. + return 0; +} +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +export function reset() { + requestsByOrigin.clear(); + requestMap.clear(); + requestsByTime.length = 0; + handlerState = 1 /* HandlerState.UNINITIALIZED */; +} +export function initialize() { + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Network Request handler is not initialized'); + } + if (Types.TraceEvents.isTraceEventResourceChangePriority(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'changePriority', event); + return; + } + if (Types.TraceEvents.isTraceEventResourceWillSendRequest(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'willSendRequests', [event]); + return; + } + if (Types.TraceEvents.isTraceEventResourceSendRequest(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'sendRequests', [event]); + return; + } + if (Types.TraceEvents.isTraceEventResourceReceiveResponse(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'receiveResponse', event); + return; + } + if (Types.TraceEvents.isTraceEventResourceReceivedData(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'receivedData', [event]); + return; + } + if (Types.TraceEvents.isTraceEventResourceFinish(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'resourceFinish', event); + return; + } + if (Types.TraceEvents.isTraceEventResourceMarkAsCached(event)) { + storeTraceEventWithRequestId(event.args.data.requestId, 'resourceMarkAsCached', event); + return; + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Network Request handler is not initialized'); + } + const { rendererProcessesByFrame } = 
metaHandlerData(); + for (const [requestId, request] of requestMap.entries()) { + // If we have an incomplete set of events here, we choose to drop the network + // request rather than attempt to synthesize the missing data. + if (!request.sendRequests || !request.receiveResponse) { + continue; + } + // In the data we may get multiple willSendRequests and sendRequests, which + // will indicate that there are redirects for a given (sub)resource. In the + // case of a navigation, e.g., example.com/ we will get willSendRequests, + // and we should use these to calculate time spent in redirects. + // In the case of sub-resources, however, e.g., example.com/foo.js we will + // *only* get sendRequests, and we use these instead of willSendRequests + // to detect the time in redirects. We always use the sendRequest for the + // url, priority etc since it contains those values, but we use the + // willSendRequest (if it exists) to calculate the timestamp and durations + // of redirects. + const redirects = []; + for (let i = 0; i < request.sendRequests.length - 1; i++) { + const sendRequest = request.sendRequests[i]; + const nextSendRequest = request.sendRequests[i + 1]; + // Use the willSendRequests as the source for redirects if possible. + // We default to those of the sendRequests, however, since willSendRequest + // is not guaranteed to be present in the data for every request. 
+ let ts = sendRequest.ts; + let dur = Types.Timing.MicroSeconds(nextSendRequest.ts - sendRequest.ts); + if (request.willSendRequests && request.willSendRequests[i] && request.willSendRequests[i + 1]) { + const willSendRequest = request.willSendRequests[i]; + const nextWillSendRequest = request.willSendRequests[i + 1]; + ts = willSendRequest.ts; + dur = Types.Timing.MicroSeconds(nextWillSendRequest.ts - willSendRequest.ts); + } + redirects.push({ + url: sendRequest.args.data.url, + priority: sendRequest.args.data.priority, + requestMethod: sendRequest.args.data.requestMethod, + ts, + dur, + }); + } + // If a ResourceFinish event with an encoded data length is received, + // then the resource was not cached; it was fetched before it was + // requested, e.g. because it was pushed in this navigation. + const isPushedResource = request.resourceFinish?.args.data.encodedDataLength !== 0; + // This works around crbug.com/998397, which reports pushed resources, and resources served by a service worker as disk cached. + const isDiskCached = request.receiveResponse.args.data.fromCache && + !request.receiveResponse.args.data.fromServiceWorker && !isPushedResource; + // If the request contains a resourceMarkAsCached event, it was served from memory cache. + const isMemoryCached = request.resourceMarkAsCached !== undefined; + // The timing data returned is from the original (uncached) request, which + // means that if we leave the above network record data as-is when the + // request came from either the disk cache or memory cache, our calculations + // will be incorrect. + // + // Here we add a flag so when we calculate the timestamps of the various + // events, we can overwrite them. + // These timestamps may not be perfect (indeed they don't always match + // the Network CDP domain exactly, which is likely an artifact of the way + // the data is routed on the backend), but they're the closest we have. 
+ const isCached = isMemoryCached || isDiskCached; + const timing = request.receiveResponse.args.data.timing; + // If a non-cached request has no |timing| indicates data URLs, we ignore it. + if (!timing && !isCached) { + continue; + } + const firstSendRequest = request.sendRequests[0]; + const finalSendRequest = request.sendRequests[request.sendRequests.length - 1]; + const initialPriority = finalSendRequest.args.data.priority; + let finalPriority = initialPriority; + if (request.changePriority) { + finalPriority = request.changePriority.args.data.priority; + } + // Start time + // ======================= + // The time where the request started, which is either the first willSendRequest + // event if there is one, or, if there is not, the sendRequest. + const startTime = (request.willSendRequests && request.willSendRequests.length) ? + Types.Timing.MicroSeconds(request.willSendRequests[0].ts) : + Types.Timing.MicroSeconds(firstSendRequest.ts); + // End redirect time + // ======================= + // It's possible that when we start requesting data we will receive redirections. + // Here we note the time of the *last* willSendRequest / sendRequest event, + // which is used later on in the calculations for time queueing etc. + const endRedirectTime = (request.willSendRequests && request.willSendRequests.length) ? + Types.Timing.MicroSeconds(request.willSendRequests[request.willSendRequests.length - 1].ts) : + Types.Timing.MicroSeconds(finalSendRequest.ts); + // Finish time and end time + // ======================= + // The finish time and the end time are subtly different. + // - Finish time: records the point at which the network stack stopped receiving the data + // - End time: the timestamp of the finish event itself (if one exists) + // + // The end time, then, will be slightly after the finish time. + const endTime = request.resourceFinish ? request.resourceFinish.ts : endRedirectTime; + const finishTime = request.resourceFinish?.args.data.finishTime ? 
+ Types.Timing.MicroSeconds(request.resourceFinish.args.data.finishTime * SECONDS_TO_MICROSECONDS) : + Types.Timing.MicroSeconds(endTime); + // Network duration + // ======================= + // Time spent on the network. + const networkDuration = isCached ? Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((finishTime || endRedirectTime) - endRedirectTime); + // Processing duration + // ======================= + // Time spent from start to end. + const processingDuration = Types.Timing.MicroSeconds(endTime - (finishTime || endTime)); + // Redirection duration + // ======================= + // Time between the first willSendRequest / sendRequest and last. This we place in *front* of the + // queueing, since the queueing time that we know about from the trace data is only the last request, + // i.e., the one that occurs after all the redirects. + const redirectionDuration = Types.Timing.MicroSeconds(endRedirectTime - startTime); + // Queueing + // ======================= + // The amount of time queueing is the time between the request's start time to the requestTime + // arg recorded in the receiveResponse event. In the cases where the recorded start time is larger + // that the requestTime we set queueing time to zero. + const queueing = isCached ? + Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds(Platform.NumberUtilities.clamp((timing.requestTime * SECONDS_TO_MICROSECONDS - endRedirectTime), 0, Number.MAX_VALUE)); + // Stalled + // ======================= + // If the request is cached, the amount of time stalled is the time between the start time and + // receiving a response. + // Otherwise it is whichever positive number comes first from the following timing info: + // DNS start, Connection start, Send Start, or the time duration between our start time and + // receiving a response. + const stalled = isCached ? 
Types.Timing.MicroSeconds(request.receiveResponse.ts - startTime) : + Types.Timing.MicroSeconds(firstPositiveValueInList([ + timing.dnsStart * MILLISECONDS_TO_MICROSECONDS, + timing.connectStart * MILLISECONDS_TO_MICROSECONDS, + timing.sendStart * MILLISECONDS_TO_MICROSECONDS, + (request.receiveResponse.ts - endRedirectTime), + ])); + // Sending HTTP request + // ======================= + // Time when the HTTP request is sent. + const sendStartTime = isCached ? + startTime : + Types.Timing.MicroSeconds(timing.requestTime * SECONDS_TO_MICROSECONDS + timing.sendStart * MILLISECONDS_TO_MICROSECONDS); + // Waiting + // ======================= + // Time from when the send finished going to when the headers were received. + const waiting = isCached ? + Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((timing.receiveHeadersEnd - timing.sendEnd) * MILLISECONDS_TO_MICROSECONDS); + // Download + // ======================= + // Time from receipt of headers to the finish time. + const downloadStart = isCached ? + startTime : + Types.Timing.MicroSeconds(timing.requestTime * SECONDS_TO_MICROSECONDS + timing.receiveHeadersEnd * MILLISECONDS_TO_MICROSECONDS); + const download = isCached ? Types.Timing.MicroSeconds(endTime - request.receiveResponse.ts) : + Types.Timing.MicroSeconds(((finishTime || downloadStart) - downloadStart)); + const totalTime = Types.Timing.MicroSeconds(networkDuration + processingDuration); + // Collect a few values from the timing info. + // If the Network request is cached, we zero out them. + const dnsLookup = isCached ? + Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((timing.dnsEnd - timing.dnsStart) * MILLISECONDS_TO_MICROSECONDS); + const ssl = isCached ? Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((timing.sslEnd - timing.sslStart) * MILLISECONDS_TO_MICROSECONDS); + const proxyNegotiation = isCached ? 
+ Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((timing.proxyEnd - timing.proxyStart) * MILLISECONDS_TO_MICROSECONDS); + const requestSent = isCached ? + Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((timing.sendEnd - timing.sendStart) * MILLISECONDS_TO_MICROSECONDS); + const initialConnection = isCached ? + Types.Timing.MicroSeconds(0) : + Types.Timing.MicroSeconds((timing.connectEnd - timing.connectStart) * MILLISECONDS_TO_MICROSECONDS); + // Finally get some of the general data from the trace events. + const { frame, url, renderBlocking } = finalSendRequest.args.data; + const { encodedDataLength, decodedBodyLength } = request.resourceFinish ? request.resourceFinish.args.data : { encodedDataLength: 0, decodedBodyLength: 0 }; + const parsedUrl = new URL(url); + const isHttps = parsedUrl.protocol === 'https:'; + const requestingFrameUrl = Helpers.Trace.activeURLForFrameAtTime(frame, finalSendRequest.ts, rendererProcessesByFrame) || ''; + // Construct a synthetic trace event for this network request. + const networkEvent = { + rawSourceEvent: finalSendRequest, + args: { + data: { + // All data we create from trace events should be added to |syntheticData|. + syntheticData: { + dnsLookup, + download, + downloadStart, + finishTime, + initialConnection, + isDiskCached, + isHttps, + isMemoryCached, + isPushedResource, + networkDuration, + processingDuration, + proxyNegotiation, + queueing, + redirectionDuration, + requestSent, + sendStartTime, + ssl, + stalled, + totalTime, + waiting, + }, + // All fields below are from TraceEventsForNetworkRequest. + decodedBodyLength, + encodedDataLength, + frame, + fromServiceWorker: request.receiveResponse.args.data.fromServiceWorker, + isLinkPreload: request.receiveResponse.args.data.isLinkPreload || false, + mimeType: request.receiveResponse.args.data.mimeType, + priority: finalPriority, + initialPriority, + protocol: request.receiveResponse.args.data.protocol ?? 
'unknown', + redirects, + // In the event the property isn't set, assume non-blocking. + renderBlocking: renderBlocking ? renderBlocking : 'non_blocking', + requestId, + requestingFrameUrl, + requestMethod: finalSendRequest.args.data.requestMethod, + resourceType: finalSendRequest.args.data.resourceType, + statusCode: request.receiveResponse.args.data.statusCode, + responseHeaders: request.receiveResponse.args.data.headers || [], + fetchPriorityHint: finalSendRequest.args.data.fetchPriorityHint, + initiator: finalSendRequest.args.data.initiator, + stackTrace: finalSendRequest.args.data.stackTrace, + timing, + url, + failed: request.resourceFinish?.args.data.didFail ?? false, + finished: Boolean(request.resourceFinish), + connectionId: request.receiveResponse.args.data.connectionId, + connectionReused: request.receiveResponse.args.data.connectionReused, + }, + }, + cat: 'loading', + name: 'SyntheticNetworkRequest', + ph: "X" /* Types.TraceEvents.Phase.COMPLETE */, + dur: Types.Timing.MicroSeconds(endTime - startTime), + tdur: Types.Timing.MicroSeconds(endTime - startTime), + ts: Types.Timing.MicroSeconds(startTime), + tts: Types.Timing.MicroSeconds(startTime), + pid: finalSendRequest.pid, + tid: finalSendRequest.tid, + }; + const requests = Platform.MapUtilities.getWithDefault(requestsByOrigin, parsedUrl.host, () => { + return { + renderBlocking: [], + nonRenderBlocking: [], + all: [], + }; + }); + // For ease of rendering we sometimes want to differentiate between + // render-blocking and non-render-blocking, so we divide the data here. + if (networkEvent.args.data.renderBlocking === 'non_blocking') { + requests.nonRenderBlocking.push(networkEvent); + } + else { + requests.renderBlocking.push(networkEvent); + } + // However, there are also times where we just want to loop through all + // the captured requests, so here we store all of them together. 
+ requests.all.push(networkEvent); + requestsByTime.push(networkEvent); + } + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Network Request handler is not finalized'); + } + return { + byOrigin: requestsByOrigin, + byTime: requestsByTime, + }; +} +export function deps() { + return ['Meta']; +} +//# sourceMappingURL=NetworkRequestsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/PageFramesHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/PageFramesHandler.js new file mode 100644 index 000000000..694a7029d --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/PageFramesHandler.js @@ -0,0 +1,41 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +const frames = new Map(); +export function reset() { + frames.clear(); +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventTracingStartedInBrowser(event)) { + for (const frame of event.args.data?.frames ?? []) { + // The ID of a frame is stored under the `frame` key. + frames.set(frame.frame, frame); + } + return; + } + // CommitLoad events can contain an updated URL or Name for a frame. + if (Types.TraceEvents.isTraceEventCommitLoad(event)) { + const frameData = event.args.data; + if (!frameData) { + return; + } + // We don't want to mutate the original object, hence why + // we set a new object from the new and existing values. 
+ const frame = frames.get(frameData.frame); + if (!frame) { + return; + } + frames.set(frameData.frame, { + ...frame, + url: frameData.url || frame.url, + name: frameData.name || frameData.name, + }); + } +} +export function data() { + return { + frames, + }; +} +//# sourceMappingURL=PageFramesHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/PageLoadMetricsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/PageLoadMetricsHandler.js new file mode 100644 index 000000000..42826d3db --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/PageLoadMetricsHandler.js @@ -0,0 +1,383 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +/** + * This handler stores page load metrics, including web vitals, + * and exports them in the shape of a map with the following shape: + * Map(FrameId -> Map(navigationID -> metrics) ) + * + * It also exports all markers in a trace in an array. + * + * Some metrics are taken directly from a page load events (AKA markers) like DCL. + * Others require processing multiple events to be determined, like CLS and TBT. + */ +import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as metaHandlerData } from './MetaHandler.js'; +/** + * This represents the metric scores for all navigations, for all frames in a trace. + * Given a frame id, the map points to another map from navigation id to metric scores. + * The metric scores include the event related to the metric as well as the data regarding + * the score itself. + */ +const metricScoresByFrameId = new Map(); +/** + * Page load events with no associated duration that happened in the + * main frame. 
+ */ +let allMarkerEvents = []; +export function reset() { + metricScoresByFrameId.clear(); + pageLoadEventsArray = []; + allMarkerEvents = []; + selectedLCPCandidateEvents.clear(); +} +let pageLoadEventsArray = []; +// Once we've found the LCP events in the trace we want to fetch their DOM Node +// from the backend. We could do this by parsing through our Map of frame => +// navigation => metric, but it's easier to keep a set of LCP events. As we +// parse the trace, any time we store an LCP candidate as the potential LCP +// event, we store the event here. If we later find a new candidate in the +// trace, we store that and delete the prior event. When we've parsed the +// entire trace this set will contain all the LCP events that were used - e.g. +// the candidates that were the actual LCP events. +const selectedLCPCandidateEvents = new Set(); +export function handleEvent(event) { + if (!Types.TraceEvents.eventIsPageLoadEvent(event)) { + return; + } + pageLoadEventsArray.push(event); +} +function storePageLoadMetricAgainstNavigationId(navigation, event) { + const navigationId = navigation.args.data?.navigationId; + if (!navigationId) { + throw new Error('Navigation event unexpectedly had no navigation ID.'); + } + const frameId = getFrameIdForPageLoadEvent(event); + const { rendererProcessesByFrame } = metaHandlerData(); + // If either of these pieces of data do not exist, the most likely + // explanation is that the page load metric we found is for a frame/process + // combo that the MetaHandler discarded. This typically happens if we get a + // navigation event with an empty URL. Therefore, we will silently return and + // drop this metric. If we didn't care about the navigation, we certainly do + // not need to care about metrics for that navigation. 
+ const rendererProcessesInFrame = rendererProcessesByFrame.get(frameId); + if (!rendererProcessesInFrame) { + return; + } + const processData = rendererProcessesInFrame.get(event.pid); + if (!processData) { + return; + } + if (Types.TraceEvents.isTraceEventNavigationStart(event)) { + return; + } + if (Types.TraceEvents.isTraceEventFirstContentfulPaint(event)) { + const fcpTime = Types.Timing.MicroSeconds(event.ts - navigation.ts); + const score = Helpers.Timing.formatMicrosecondsTime(fcpTime, { + format: 2 /* Types.Timing.TimeUnit.SECONDS */, + maximumFractionDigits: 2, + }); + const classification = scoreClassificationForFirstContentfulPaint(fcpTime); + const metricScore = { event, score, metricName: "FCP" /* MetricName.FCP */, classification, navigation, timing: fcpTime }; + storeMetricScore(frameId, navigationId, metricScore); + return; + } + if (Types.TraceEvents.isTraceEventFirstPaint(event)) { + const paintTime = Types.Timing.MicroSeconds(event.ts - navigation.ts); + const score = Helpers.Timing.formatMicrosecondsTime(paintTime, { + format: 2 /* Types.Timing.TimeUnit.SECONDS */, + maximumFractionDigits: 2, + }); + const classification = "unclassified" /* ScoreClassification.UNCLASSIFIED */; + const metricScore = { event, score, metricName: "FP" /* MetricName.FP */, classification, navigation, timing: paintTime }; + storeMetricScore(frameId, navigationId, metricScore); + return; + } + if (Types.TraceEvents.isTraceEventMarkDOMContent(event)) { + const dclTime = Types.Timing.MicroSeconds(event.ts - navigation.ts); + const score = Helpers.Timing.formatMicrosecondsTime(dclTime, { + format: 2 /* Types.Timing.TimeUnit.SECONDS */, + maximumFractionDigits: 2, + }); + const metricScore = { + event, + score, + metricName: "DCL" /* MetricName.DCL */, + classification: scoreClassificationForDOMContentLoaded(dclTime), + navigation, + timing: dclTime, + }; + storeMetricScore(frameId, navigationId, metricScore); + return; + } + if 
(Types.TraceEvents.isTraceEventInteractiveTime(event)) { + const ttiValue = Types.Timing.MicroSeconds(event.ts - navigation.ts); + const ttiScore = Helpers.Timing.formatMicrosecondsTime(ttiValue, { + format: 2 /* Types.Timing.TimeUnit.SECONDS */, + maximumFractionDigits: 2, + }); + const tti = { + event, + score: ttiScore, + metricName: "TTI" /* MetricName.TTI */, + classification: scoreClassificationForTimeToInteractive(ttiValue), + navigation, + timing: ttiValue, + }; + storeMetricScore(frameId, navigationId, tti); + const tbtValue = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(event.args.args.total_blocking_time_ms)); + const tbtScore = Helpers.Timing.formatMicrosecondsTime(tbtValue, { + format: 1 /* Types.Timing.TimeUnit.MILLISECONDS */, + maximumFractionDigits: 2, + }); + const tbt = { + event, + score: tbtScore, + metricName: "TBT" /* MetricName.TBT */, + classification: scoreClassificationForTotalBlockingTime(tbtValue), + navigation, + timing: tbtValue, + }; + storeMetricScore(frameId, navigationId, tbt); + return; + } + if (Types.TraceEvents.isTraceEventMarkLoad(event)) { + const loadTime = Types.Timing.MicroSeconds(event.ts - navigation.ts); + const score = Helpers.Timing.formatMicrosecondsTime(loadTime, { + format: 2 /* Types.Timing.TimeUnit.SECONDS */, + maximumFractionDigits: 2, + }); + const metricScore = { + event, + score, + metricName: "L" /* MetricName.L */, + classification: "unclassified" /* ScoreClassification.UNCLASSIFIED */, + navigation, + timing: loadTime, + }; + storeMetricScore(frameId, navigationId, metricScore); + return; + } + if (Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(event)) { + const candidateIndex = event.args.data?.candidateIndex; + if (!candidateIndex) { + throw new Error('Largest Contentful Paint unexpectedly had no candidateIndex.'); + } + const lcpTime = Types.Timing.MicroSeconds(event.ts - navigation.ts); + const lcpScore = Helpers.Timing.formatMicrosecondsTime(lcpTime, { + format: 
2 /* Types.Timing.TimeUnit.SECONDS */, + maximumFractionDigits: 2, + }); + const lcp = { + event, + score: lcpScore, + metricName: "LCP" /* MetricName.LCP */, + classification: scoreClassificationForLargestContentfulPaint(lcpTime), + navigation, + timing: lcpTime, + }; + const metricsByNavigation = Platform.MapUtilities.getWithDefault(metricScoresByFrameId, frameId, () => new Map()); + const metrics = Platform.MapUtilities.getWithDefault(metricsByNavigation, navigationId, () => new Map()); + const lastLCPCandidate = metrics.get("LCP" /* MetricName.LCP */); + if (lastLCPCandidate === undefined) { + selectedLCPCandidateEvents.add(lcp.event); + storeMetricScore(frameId, navigationId, lcp); + return; + } + const lastLCPCandidateEvent = lastLCPCandidate.event; + if (!Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(lastLCPCandidateEvent)) { + return; + } + const lastCandidateIndex = lastLCPCandidateEvent.args.data?.candidateIndex; + if (!lastCandidateIndex) { + // lastCandidateIndex cannot be undefined because we don't store candidates with + // with an undefined candidateIndex value. This check is only to make TypeScript + // treat the field as not undefined below. 
+ return; + } + if (lastCandidateIndex < candidateIndex) { + selectedLCPCandidateEvents.delete(lastLCPCandidateEvent); + selectedLCPCandidateEvents.add(lcp.event); + storeMetricScore(frameId, navigationId, lcp); + } + return; + } + if (Types.TraceEvents.isTraceEventLayoutShift(event)) { + return; + } + return Platform.assertNever(event, `Unexpected event type: ${event}`); +} +function storeMetricScore(frameId, navigationId, metricScore) { + const metricsByNavigation = Platform.MapUtilities.getWithDefault(metricScoresByFrameId, frameId, () => new Map()); + const metrics = Platform.MapUtilities.getWithDefault(metricsByNavigation, navigationId, () => new Map()); + // If an entry with that metric name is present, delete it so that the new entry that + // will replace it is added at the end of the map. This way we guarantee the map entries + // are ordered in ASC manner by timestamp. + metrics.delete(metricScore.metricName); + metrics.set(metricScore.metricName, metricScore); +} +export function getFrameIdForPageLoadEvent(event) { + if (Types.TraceEvents.isTraceEventFirstContentfulPaint(event) || + Types.TraceEvents.isTraceEventInteractiveTime(event) || + Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(event) || + Types.TraceEvents.isTraceEventNavigationStart(event) || Types.TraceEvents.isTraceEventLayoutShift(event) || + Types.TraceEvents.isTraceEventFirstPaint(event)) { + return event.args.frame; + } + if (Types.TraceEvents.isTraceEventMarkDOMContent(event) || Types.TraceEvents.isTraceEventMarkLoad(event)) { + const frameId = event.args.data?.frame; + if (!frameId) { + throw new Error('MarkDOMContent unexpectedly had no frame ID.'); + } + return frameId; + } + Platform.assertNever(event, `Unexpected event type: ${event}`); +} +function getNavigationForPageLoadEvent(event) { + if (Types.TraceEvents.isTraceEventFirstContentfulPaint(event) || + Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(event) || + 
Types.TraceEvents.isTraceEventFirstPaint(event)) { + const navigationId = event.args.data?.navigationId; + if (!navigationId) { + throw new Error('Trace event unexpectedly had no navigation ID.'); + } + const { navigationsByNavigationId } = metaHandlerData(); + const navigation = navigationsByNavigationId.get(navigationId); + if (!navigation) { + // This event's navigation has been filtered out by the meta handler as a noise event. + return null; + } + return navigation; + } + if (Types.TraceEvents.isTraceEventMarkDOMContent(event) || Types.TraceEvents.isTraceEventInteractiveTime(event) || + Types.TraceEvents.isTraceEventLayoutShift(event) || Types.TraceEvents.isTraceEventMarkLoad(event)) { + const frameId = getFrameIdForPageLoadEvent(event); + const { navigationsByFrameId } = metaHandlerData(); + return Helpers.Trace.getNavigationForTraceEvent(event, frameId, navigationsByFrameId); + } + if (Types.TraceEvents.isTraceEventNavigationStart(event)) { + // We don't want to compute metrics of the navigation relative to itself, so we'll avoid avoid all that. 
+ return null; + } + return Platform.assertNever(event, `Unexpected event type: ${event}`); +} +/** + * Classifications sourced from + * https://web.dev/fcp/ + */ +export function scoreClassificationForFirstContentfulPaint(fcpScoreInMicroseconds) { + const FCP_GOOD_TIMING = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(1.8)); + const FCP_MEDIUM_TIMING = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(3.0)); + let scoreClassification = "bad" /* ScoreClassification.BAD */; + if (fcpScoreInMicroseconds <= FCP_MEDIUM_TIMING) { + scoreClassification = "ok" /* ScoreClassification.OK */; + } + if (fcpScoreInMicroseconds <= FCP_GOOD_TIMING) { + scoreClassification = "good" /* ScoreClassification.GOOD */; + } + return scoreClassification; +} +/** + * Classifications sourced from + * https://web.dev/interactive/#how-lighthouse-determines-your-tti-score + */ +export function scoreClassificationForTimeToInteractive(ttiTimeInMicroseconds) { + const TTI_GOOD_TIMING = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(3.8)); + const TTI_MEDIUM_TIMING = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(7.3)); + let scoreClassification = "bad" /* ScoreClassification.BAD */; + if (ttiTimeInMicroseconds <= TTI_MEDIUM_TIMING) { + scoreClassification = "ok" /* ScoreClassification.OK */; + } + if (ttiTimeInMicroseconds <= TTI_GOOD_TIMING) { + scoreClassification = "good" /* ScoreClassification.GOOD */; + } + return scoreClassification; +} +/** + * Classifications sourced from + * https://web.dev/lcp/#what-is-lcp + */ +export function scoreClassificationForLargestContentfulPaint(lcpTimeInMicroseconds) { + const LCP_GOOD_TIMING = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(2.5)); + const LCP_MEDIUM_TIMING = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(4)); + let scoreClassification = "bad" /* ScoreClassification.BAD */; + if (lcpTimeInMicroseconds <= LCP_MEDIUM_TIMING) { + scoreClassification = "ok" /* ScoreClassification.OK 
*/; + } + if (lcpTimeInMicroseconds <= LCP_GOOD_TIMING) { + scoreClassification = "good" /* ScoreClassification.GOOD */; + } + return scoreClassification; +} +/** + * DCL does not have a classification. + */ +export function scoreClassificationForDOMContentLoaded(_dclTimeInMicroseconds) { + return "unclassified" /* ScoreClassification.UNCLASSIFIED */; +} +/** + * Classifications sourced from + * https://web.dev/lighthouse-total-blocking-#time/ + */ +export function scoreClassificationForTotalBlockingTime(tbtTimeInMicroseconds) { + const TBT_GOOD_TIMING = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(200)); + const TBT_MEDIUM_TIMING = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(600)); + let scoreClassification = "bad" /* ScoreClassification.BAD */; + if (tbtTimeInMicroseconds <= TBT_MEDIUM_TIMING) { + scoreClassification = "ok" /* ScoreClassification.OK */; + } + if (tbtTimeInMicroseconds <= TBT_GOOD_TIMING) { + scoreClassification = "good" /* ScoreClassification.GOOD */; + } + return scoreClassification; +} +/** + * Gets all the Largest Contentful Paint scores of all the frames in the + * trace. 
+ */ +function gatherFinalLCPEvents() { + const allFinalLCPEvents = []; + const dataForAllFrames = [...metricScoresByFrameId.values()]; + const dataForAllNavigations = dataForAllFrames.flatMap(frameData => [...frameData.values()]); + for (let i = 0; i < dataForAllNavigations.length; i++) { + const navigationData = dataForAllNavigations[i]; + const lcpInNavigation = navigationData.get("LCP" /* MetricName.LCP */); + if (!lcpInNavigation || !lcpInNavigation.event) { + continue; + } + allFinalLCPEvents.push(lcpInNavigation.event); + } + return allFinalLCPEvents; +} +export async function finalize() { + pageLoadEventsArray.sort((a, b) => a.ts - b.ts); + for (const pageLoadEvent of pageLoadEventsArray) { + const navigation = getNavigationForPageLoadEvent(pageLoadEvent); + if (navigation) { + // Event's navigation was not filtered out as noise. + storePageLoadMetricAgainstNavigationId(navigation, pageLoadEvent); + } + } + // NOTE: if you are looking for the TBT calculation, it has temporarily been + // removed. See crbug.com/1424335 for details. + const allFinalLCPEvents = gatherFinalLCPEvents(); + const mainFrame = metaHandlerData().mainFrameId; + // Filter out LCP candidates to use only definitive LCP values + const allEventsButLCP = pageLoadEventsArray.filter(event => !Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(event)); + const markerEvents = [...allFinalLCPEvents, ...allEventsButLCP].filter(Types.TraceEvents.isTraceEventMarkerEvent); + // Filter by main frame and sort. 
+ allMarkerEvents = + markerEvents.filter(event => getFrameIdForPageLoadEvent(event) === mainFrame).sort((a, b) => a.ts - b.ts); +} +export function data() { + return { + metricScoresByFrameId, + allMarkerEvents, + }; +} +export function deps() { + return ['Meta']; +} +//# sourceMappingURL=PageLoadMetricsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/RendererHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/RendererHandler.js new file mode 100644 index 000000000..c09f4cbb0 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/RendererHandler.js @@ -0,0 +1,336 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as auctionWorkletsData } from './AuctionWorkletsHandler.js'; +import { data as metaHandlerData } from './MetaHandler.js'; +import { data as samplesHandlerData } from './SamplesHandler.js'; +/** + * This handler builds the hierarchy of trace events and profile calls + * on each thread on each process. + * + * Throughout the code, trace events and profile calls are referred to + * as "entries", but note they are different types of data. Trace events + * come directly from the backend and it's the type the engine commonly + * refers to. Profile calls on the other hand are built in the frontend, + * and, for compatibility purposes, typed as an extension to the trace + * event type. + */ +const processes = new Map(); +// We track the compositor tile worker thread name events so that at the end we +// can return these keyed by the process ID. These are used in the frontend to +// show the user the rasterization thread(s) on the main frame as tracks. 
+const compositorTileWorkers = Array(); +const entryToNode = new Map(); +let allTraceEntries = []; +const completeEventStack = []; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +let config = Types.Configuration.defaults(); +const makeRendererProcess = () => ({ + url: null, + isOnMainFrame: false, + threads: new Map(), +}); +const makeRendererThread = () => ({ + name: null, + entries: [], +}); +const getOrCreateRendererProcess = (processes, pid) => { + return Platform.MapUtilities.getWithDefault(processes, pid, makeRendererProcess); +}; +const getOrCreateRendererThread = (process, tid) => { + return Platform.MapUtilities.getWithDefault(process.threads, tid, makeRendererThread); +}; +export function handleUserConfig(userConfig) { + config = userConfig; +} +export function reset() { + processes.clear(); + entryToNode.clear(); + allTraceEntries.length = 0; + completeEventStack.length = 0; + compositorTileWorkers.length = 0; + handlerState = 1 /* HandlerState.UNINITIALIZED */; +} +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('Renderer Handler was not reset'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Renderer Handler is not initialized'); + } + if (Types.TraceEvents.isThreadName(event) && event.args.name?.startsWith('CompositorTileWorker')) { + compositorTileWorkers.push({ + pid: event.pid, + tid: event.tid, + }); + } + if (Types.TraceEvents.isTraceEventBegin(event) || Types.TraceEvents.isTraceEventEnd(event)) { + const process = getOrCreateRendererProcess(processes, event.pid); + const thread = getOrCreateRendererThread(process, event.tid); + const completeEvent = makeCompleteEvent(event); + if (!completeEvent) { + return; + } + thread.entries.push(completeEvent); + allTraceEntries.push(completeEvent); + return; + } + if (Types.TraceEvents.isTraceEventInstant(event) 
|| Types.TraceEvents.isTraceEventComplete(event)) { + const process = getOrCreateRendererProcess(processes, event.pid); + const thread = getOrCreateRendererThread(process, event.tid); + thread.entries.push(event); + allTraceEntries.push(event); + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Renderer Handler is not initialized'); + } + const { mainFrameId, rendererProcessesByFrame, threadsInProcess } = metaHandlerData(); + assignMeta(processes, mainFrameId, rendererProcessesByFrame, threadsInProcess); + sanitizeProcesses(processes); + buildHierarchy(processes); + sanitizeThreads(processes); + Helpers.Trace.sortTraceEventsInPlace(allTraceEntries); + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Renderer Handler is not finalized'); + } + return { + processes: new Map(processes), + compositorTileWorkers: new Map(gatherCompositorThreads()), + entryToNode: new Map(entryToNode), + allTraceEntries: [...allTraceEntries], + }; +} +function gatherCompositorThreads() { + const threadsByProcess = new Map(); + for (const worker of compositorTileWorkers) { + const byProcess = threadsByProcess.get(worker.pid) || []; + byProcess.push(worker.tid); + threadsByProcess.set(worker.pid, byProcess); + } + return threadsByProcess; +} +/** + * Steps through all the renderer processes we've located so far in the meta + * handler, obtaining their URL, checking whether they are the main frame, and + * collecting each one of their threads' name. This meta handler's data is + * assigned to the renderer handler's data. 
+ */ +export function assignMeta(processes, mainFrameId, rendererProcessesByFrame, threadsInProcess) { + assignOrigin(processes, rendererProcessesByFrame); + assignIsMainFrame(processes, mainFrameId, rendererProcessesByFrame); + assignThreadName(processes, rendererProcessesByFrame, threadsInProcess); +} +/** + * Assigns origins to all threads in all processes. + * @see assignMeta + */ +export function assignOrigin(processes, rendererProcessesByFrame) { + for (const renderProcessesByPid of rendererProcessesByFrame.values()) { + for (const [pid, processWindows] of renderProcessesByPid) { + for (const processInfo of processWindows.flat()) { + const process = getOrCreateRendererProcess(processes, pid); + // Sometimes a single process is responsible with rendering multiple + // frames at the same time. For example, see https://crbug.com/1334563. + // When this happens, we'd still like to assign a single url per process + // so: 1) use the first frame rendered by this process as the url source + // and 2) if the last url is "about:blank", use the next frame's url, + // data from about:blank is irrelevant. + if (process.url === null || process.url === 'about:blank') { + // If we are here, it's because we care about this process and the URL. But before we store + // it, we check if it is a valid URL by trying to create a URL object. If it isn't, we won't + // set it, and this process will be filtered out later. + try { + new URL(processInfo.frame.url); + process.url = processInfo.frame.url; + } + catch (e) { + process.url = null; + } + } + } + } + } +} +/** + * Assigns whether or not a thread is the main frame to all threads in all processes. 
+ * @see assignMeta + */ +export function assignIsMainFrame(processes, mainFrameId, rendererProcessesByFrame) { + for (const [frameId, renderProcessesByPid] of rendererProcessesByFrame) { + for (const [pid] of renderProcessesByPid) { + const process = getOrCreateRendererProcess(processes, pid); + // We have this go in one direction; once a renderer has been flagged as + // being on the main frame, we don't unset it to false if were to show up + // in a subframe. Equally, if we already saw this renderer in a subframe, + // but it becomes the main frame, the flag would get updated. + if (frameId === mainFrameId) { + process.isOnMainFrame = true; + } + } + } +} +/** + * Assigns the thread name to all threads in all processes. + * @see assignMeta + */ +export function assignThreadName(processes, rendererProcessesByFrame, threadsInProcess) { + for (const [pid, process] of processes) { + for (const [tid, threadInfo] of threadsInProcess.get(pid) ?? []) { + const thread = getOrCreateRendererThread(process, tid); + thread.name = threadInfo?.args.name ?? `${tid}`; + } + } +} +/** + * Removes unneeded trace data opportunistically stored while handling events. + * This currently does the following: + * - Deletes processes with an unkonwn origin. + */ +export function sanitizeProcesses(processes) { + const auctionWorklets = auctionWorkletsData().worklets; + const metaData = metaHandlerData(); + if (metaData.traceIsGeneric) { + return; + } + for (const [pid, process] of processes) { + // If the process had no url, or if it had a malformed url that could not be + // parsed for some reason, or if it's an "about:" origin, delete it. + // This is done because we don't really care about processes for which we + // can't provide actionable insights to the user (e.g. about:blank pages). 
+ // + // There is one exception; AuctionWorklet processes get parsed in a + // separate handler, so at this point we check to see if the process has + // been found by the AuctionWorkletsHandler, and if so we update the URL. + // This ensures that we keep this process around and do not drop it due to + // the lack of a URL. + if (process.url === null) { + const maybeWorklet = auctionWorklets.get(pid); + if (maybeWorklet) { + process.url = maybeWorklet.host; + } + else { + processes.delete(pid); + } + continue; + } + } +} +/** + * Removes unneeded trace data opportunistically stored while handling events. + * This currently does the following: + * - Deletes threads with no roots. + */ +export function sanitizeThreads(processes) { + for (const [, process] of processes) { + for (const [tid, thread] of process.threads) { + // If the thread has no roots, delete it. Otherwise, there's going to + // be space taken, even though nothing is rendered in the track manager. + if (!thread.tree?.roots.size) { + process.threads.delete(tid); + } + } + } +} +/** + * Creates a hierarchical structure from the trace events. Each thread in each + * process will contribute to their own individual hierarchy. + * + * The trace data comes in as a contiguous array of events, against which we + * make a couple of assumptions: + * + * 1. Events are temporally-ordered in terms of start time (though they're + * not necessarily ordered as such in the data stream). + * 2. If event B's start and end times are within event A's time boundaries + * we assume that A is the parent of B. 
+ * + * Therefore we expect to reformulate something like: + * + * [ Task A ][ Task B ][ Task C ][ Task D ][ Task E ] + * + * Into something hierarchically-arranged like below: + * + * |------------- Task A -------------||-- Task E --| + * |-- Task B --||-- Task D --| + * |- Task C -| + */ +export function buildHierarchy(processes, options) { + const samplesData = samplesHandlerData(); + for (const [pid, process] of processes) { + for (const [tid, thread] of process.threads) { + if (!thread.entries.length) { + thread.tree = Helpers.TreeHelpers.makeEmptyTraceEntryTree(); + continue; + } + // Step 1. Massage the data. + Helpers.Trace.sortTraceEventsInPlace(thread.entries); + // Step 2. Inject profile calls from samples + const samplesDataForThread = samplesData.profilesInProcess.get(pid)?.get(tid); + if (samplesDataForThread) { + const cpuProfile = samplesDataForThread.parsedProfile; + const samplesIntegrator = cpuProfile && + new Helpers.SamplesIntegrator.SamplesIntegrator(cpuProfile, samplesDataForThread.profileId, pid, tid, config); + const profileCalls = samplesIntegrator?.buildProfileCalls(thread.entries); + if (samplesIntegrator && profileCalls) { + allTraceEntries = [...allTraceEntries, ...profileCalls]; + thread.entries = Helpers.Trace.mergeEventsInOrder(thread.entries, profileCalls); + // We'll also inject the instant JSSample events (in debug mode only) + const jsSamples = samplesIntegrator.jsSampleEvents; + if (jsSamples) { + allTraceEntries = [...allTraceEntries, ...jsSamples]; + thread.entries = Helpers.Trace.mergeEventsInOrder(thread.entries, jsSamples); + } + } + } + // Step 3. Build the tree. 
+ const treeData = Helpers.TreeHelpers.treify(thread.entries, options); + thread.tree = treeData.tree; + // Update the entryToNode map with the entries from this thread + for (const [entry, node] of treeData.entryToNode) { + entryToNode.set(entry, node); + } + } + } +} +export function makeCompleteEvent(event) { + if (Types.TraceEvents.isTraceEventEnd(event)) { + // Quietly ignore unbalanced close events, they're legit (we could + // have missed start one). + const beginEvent = completeEventStack.pop(); + if (!beginEvent) { + return null; + } + if (beginEvent.name !== event.name || beginEvent.cat !== event.cat) { + console.error('Begin/End events mismatch at ' + beginEvent.ts + ' (' + beginEvent.name + ') vs. ' + event.ts + ' (' + + event.name + ')'); + return null; + } + // Update the begin event's duration using the timestamp of the end + // event. + beginEvent.dur = Types.Timing.MicroSeconds(event.ts - beginEvent.ts); + return null; + } + // Create a synthetic event using the begin event, when we find the + // matching end event later we will update its duration. + const syntheticComplete = { + ...event, + ph: "X" /* Types.TraceEvents.Phase.COMPLETE */, + dur: Types.Timing.MicroSeconds(0), + }; + completeEventStack.push(syntheticComplete); + return syntheticComplete; +} +export function deps() { + return ['Meta', 'Samples', 'AuctionWorklets']; +} +//# sourceMappingURL=RendererHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/SamplesHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/SamplesHandler.js new file mode 100644 index 000000000..398ec32fc --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/SamplesHandler.js @@ -0,0 +1,221 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Platform from '../../../core/platform/platform.js'; +import * as CPUProfile from '../../cpu_profile/cpu_profile.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +const events = new Map(); +const profilesInProcess = new Map(); +const entryToNode = new Map(); +// The profile head, containing its metadata like its start +// time, comes in a "Profile" event. The sample data comes in +// "ProfileChunk" events. We match these ProfileChunks with their head +// using process and profile ids. However, in order to integrate sample +// data with trace data, we need the thread id that owns each profile. +// This thread id is extracted from the head event. +// For this reason, we have a preprocessed data structure, where events +// are matched by profile id, which we then finish processing to export +// events matched by thread id. +const preprocessedData = new Map(); +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +function buildProfileCalls() { + for (const [processId, profiles] of preprocessedData) { + for (const [profileId, preProcessedData] of profiles) { + const threadId = preProcessedData.threadId; + if (!preProcessedData.rawProfile.nodes.length || threadId === undefined) { + continue; + } + const indexStack = []; + const profileModel = new CPUProfile.CPUProfileDataModel.CPUProfileDataModel(preProcessedData.rawProfile); + const profileTree = Helpers.TreeHelpers.makeEmptyTraceEntryTree(); + profileTree.maxDepth = profileModel.maxDepth; + const finalizedData = { + rawProfile: preProcessedData.rawProfile, + parsedProfile: profileModel, + profileCalls: [], + profileTree, + profileId, + }; + const dataByThread = Platform.MapUtilities.getWithDefault(profilesInProcess, processId, () => new Map()); + profileModel.forEachFrame(openFrameCallback, closeFrameCallback); + dataByThread.set(threadId, finalizedData); + function openFrameCallback(depth, node, sampleIndex, timeStampMilliseconds) { + if (threadId === 
undefined) { + return; + } + const ts = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(timeStampMilliseconds)); + const nodeId = node.id; + const profileCall = Helpers.Trace.makeProfileCall(node, profileId, sampleIndex, ts, processId, threadId); + finalizedData.profileCalls.push(profileCall); + indexStack.push(finalizedData.profileCalls.length - 1); + const traceEntryNode = Helpers.TreeHelpers.makeEmptyTraceEntryNode(profileCall, nodeId); + entryToNode.set(profileCall, traceEntryNode); + traceEntryNode.depth = depth; + if (indexStack.length === 1) { + // First call in the stack is a root call. + finalizedData.profileTree?.roots.add(traceEntryNode); + } + } + function closeFrameCallback(_depth, _node, _sampleIndex, _timeStampMillis, durMs, selfTimeMs) { + const profileCallIndex = indexStack.pop(); + const profileCall = profileCallIndex !== undefined && finalizedData.profileCalls[profileCallIndex]; + if (!profileCall) { + return; + } + const { callFrame, ts, pid, tid } = profileCall; + const traceEntryNode = entryToNode.get(profileCall); + if (callFrame === undefined || ts === undefined || pid === undefined || profileId === undefined || + tid === undefined || traceEntryNode === undefined) { + return; + } + const dur = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(durMs)); + const selfTime = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(selfTimeMs)); + profileCall.dur = dur; + profileCall.selfTime = selfTime; + const parentIndex = indexStack.at(-1); + const parent = parentIndex !== undefined && finalizedData.profileCalls.at(parentIndex); + const parentNode = parent && entryToNode.get(parent); + if (!parentNode) { + return; + } + traceEntryNode.parent = parentNode; + parentNode.children.push(traceEntryNode); + } + } + } +} +export function reset() { + events.clear(); + preprocessedData.clear(); + profilesInProcess.clear(); + entryToNode.clear(); + handlerState = 1 /* HandlerState.UNINITIALIZED */; +} 
+export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('Samples Handler was not reset'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Samples Handler is not initialized'); + } + /** + * A fake trace event created to support CDP.Profiler.Profiles in the + * trace engine. + */ + if (Types.TraceEvents.isSyntheticCpuProfile(event)) { + // At the moment we are attaching to a single node target so we + // should only get a single CPU profile. The values of the process + // id and thread id are not really important, so we use the data + // in the fake event. Should multi-thread CPU profiling be supported + // we could use these fields in the event to pass thread info. + const pid = event.pid; + const tid = event.tid; + // Create an arbitrary profile id. + const profileId = '0x1'; + const profileData = getOrCreatePreProcessedData(pid, profileId); + profileData.rawProfile = event.args.data.cpuProfile; + profileData.threadId = tid; + return; + } + if (Types.TraceEvents.isTraceEventProfile(event)) { + // Do not use event.args.data.startTime as it is in CLOCK_MONOTONIC domain, + // but use profileEvent.ts which has been translated to Perfetto's clock + // domain. Also convert from ms to us. + // Note: events are collected on a different thread than what's sampled. + // The correct process and thread ids are specified by the profile. 
+ const profileData = getOrCreatePreProcessedData(event.pid, event.id); + profileData.rawProfile.startTime = event.ts; + profileData.threadId = event.tid; + return; + } + if (Types.TraceEvents.isTraceEventProfileChunk(event)) { + const profileData = getOrCreatePreProcessedData(event.pid, event.id); + const cdpProfile = profileData.rawProfile; + const nodesAndSamples = event.args?.data?.cpuProfile || { samples: [] }; + const samples = nodesAndSamples?.samples || []; + const nodes = []; + for (const n of nodesAndSamples?.nodes || []) { + const lineNumber = typeof n.callFrame.lineNumber === 'undefined' ? -1 : n.callFrame.lineNumber; + const columnNumber = typeof n.callFrame.columnNumber === 'undefined' ? -1 : n.callFrame.columnNumber; + const scriptId = String(n.callFrame.scriptId); + const url = n.callFrame.url || ''; + const node = { + ...n, + callFrame: { + ...n.callFrame, + url, + lineNumber, + columnNumber, + scriptId, + }, + }; + nodes.push(node); + } + const timeDeltas = event.args.data?.timeDeltas || []; + const lines = event.args.data?.lines || Array(samples.length).fill(0); + cdpProfile.nodes.push(...nodes); + cdpProfile.samples?.push(...samples); + cdpProfile.timeDeltas?.push(...timeDeltas); + cdpProfile.lines?.push(...lines); + if (cdpProfile.samples && cdpProfile.timeDeltas && cdpProfile.samples.length !== cdpProfile.timeDeltas.length) { + console.error('Failed to parse CPU profile.'); + return; + } + if (!cdpProfile.endTime && cdpProfile.timeDeltas) { + const timeDeltas = cdpProfile.timeDeltas; + cdpProfile.endTime = timeDeltas.reduce((x, y) => x + y, cdpProfile.startTime); + } + return; + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Samples Handler is not initialized'); + } + buildProfileCalls(); + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Samples Handler is not 
finalized'); + } + return { + profilesInProcess, + entryToNode, + }; +} +function getOrCreatePreProcessedData(processId, profileId) { + const profileById = Platform.MapUtilities.getWithDefault(preprocessedData, processId, () => new Map()); + return Platform.MapUtilities.getWithDefault(profileById, profileId, () => ({ + rawProfile: { + startTime: 0, + endTime: 0, + nodes: [], + samples: [], + timeDeltas: [], + lines: [], + }, + profileId, + })); +} +/** + * Returns the name of a function for a given synthetic profile call. + * We first look to find the ProfileNode representing this call, and use its + * function name. This is preferred (and should always exist) because if we + * resolve sourcemaps, we will update this name. If that name is not present, + * we fall back to the function name that was in the callframe that we got + * when parsing the profile's trace data. + */ +export function getProfileCallFunctionName(data, entry) { + const profile = data.profilesInProcess.get(entry.pid)?.get(entry.tid); + const node = profile?.parsedProfile.nodeById(entry.nodeId); + if (node?.functionName) { + return node.functionName; + } + return entry.callFrame.functionName; +} +//# sourceMappingURL=SamplesHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/ScreenshotsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/ScreenshotsHandler.js new file mode 100644 index 000000000..9b097c457 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/ScreenshotsHandler.js @@ -0,0 +1,81 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +// Each thread contains events. 
Events indicate the thread and process IDs, which are +// used to store the event in the correct process thread entry below. +const unpairedAsyncEvents = []; +const snapshotEvents = []; +const syntheticScreenshotEvents = []; +let frameSequenceToTs = {}; +export function reset() { + unpairedAsyncEvents.length = 0; + snapshotEvents.length = 0; + syntheticScreenshotEvents.length = 0; + frameSequenceToTs = {}; +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventScreenshot(event)) { + snapshotEvents.push(event); + } + else if (Types.TraceEvents.isTraceEventPipelineReporter(event)) { + unpairedAsyncEvents.push(event); + } +} +export async function finalize() { + const pipelineReporterEvents = Helpers.Trace.createMatchedSortedSyntheticEvents(unpairedAsyncEvents); + frameSequenceToTs = Object.fromEntries(pipelineReporterEvents.map(evt => { + const frameSequenceId = evt.args.data.beginEvent.args.chrome_frame_reporter.frame_sequence; + const presentationTs = Types.Timing.MicroSeconds(evt.ts + evt.dur); + return [frameSequenceId, presentationTs]; + })); + for (const snapshotEvent of snapshotEvents) { + const { cat, name, ph, pid, tid } = snapshotEvent; + const syntheticEvent = { + rawSourceEvent: snapshotEvent, + cat, + name, + ph, + pid, + tid, + // `getPresentationTimestamp(snapshotEvent) - snapshotEvent.ts` is how many microsec the screenshot was adjusted to the right/later + ts: getPresentationTimestamp(snapshotEvent), + args: { + dataUri: `data:image/jpg;base64,${snapshotEvent.args.snapshot}`, + }, + }; + syntheticScreenshotEvents.push(syntheticEvent); + } +} +/** + * Correct the screenshot timestamps + * The screenshot 'snapshot object' trace event has the "frame sequence number" attached as an ID. + * We match that up with the "PipelineReporter" trace events as they terminate at presentation. + * Presentation == when the pixels hit the screen. 
AKA Swap on the GPU + */ +function getPresentationTimestamp(screenshotEvent) { + const frameSequence = parseInt(screenshotEvent.id, 16); + // If it's 1, then it's an old trace (before https://crrev.com/c/4957973) and cannot be corrected. + if (frameSequence === 1) { + return screenshotEvent.ts; + } + // The screenshot trace event's `ts` reflects the "expected display time" which is ESTIMATE. + // It is set by the compositor frame sink from the `expected_display_time`, which is based on a previously known + // frame start PLUS the vsync interval (eg 16.6ms) + const updatedTs = frameSequenceToTs[frameSequence]; + // Do we always find a match? No... + // We generally don't match the very first screenshot and, sometimes, the last + // The very first screenshot is requested immediately (even if nothing is painting). As a result there's no compositor + // instrumentation running alongside. + // The last one is sometimes missing as because the trace terminates right before the associated PipelineReporter is emitted. + return updatedTs ?? screenshotEvent.ts; +} +// TODO(crbug/41484172): should be readonly +export function data() { + return syntheticScreenshotEvents; +} +export function deps() { + return ['Meta']; +} +//# sourceMappingURL=ScreenshotsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/SelectorStatsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/SelectorStatsHandler.js new file mode 100644 index 000000000..09d6e06e8 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/SelectorStatsHandler.js @@ -0,0 +1,28 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Types from '../types/types.js'; +let lastUpdateLayoutTreeEvent = null; +const selectorDataForUpdateLayoutTree = new Map(); +export function reset() { + lastUpdateLayoutTreeEvent = null; + selectorDataForUpdateLayoutTree.clear(); +} +export function handleEvent(event) { + if (Types.TraceEvents.isTraceEventSelectorStats(event) && lastUpdateLayoutTreeEvent && event.args.selector_stats) { + selectorDataForUpdateLayoutTree.set(lastUpdateLayoutTreeEvent, { + timings: event.args.selector_stats.selector_timings, + }); + return; + } + if (Types.TraceEvents.isTraceEventUpdateLayoutTree(event)) { + lastUpdateLayoutTreeEvent = event; + return; + } +} +export function data() { + return { + dataForUpdateLayoutEvent: selectorDataForUpdateLayoutTree, + }; +} +//# sourceMappingURL=SelectorStatsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/Threads.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/Threads.js new file mode 100644 index 000000000..b5ae726d8 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/Threads.js @@ -0,0 +1,95 @@ +function getThreadTypeForRendererThread(pid, thread, auctionWorkletsData) { + let threadType = "OTHER" /* ThreadType.OTHER */; + if (thread.name === 'CrRendererMain') { + threadType = "MAIN_THREAD" /* ThreadType.MAIN_THREAD */; + } + else if (thread.name === 'DedicatedWorker thread') { + threadType = "WORKER" /* ThreadType.WORKER */; + } + else if (thread.name?.startsWith('CompositorTileWorker')) { + threadType = "RASTERIZER" /* ThreadType.RASTERIZER */; + } + else if (auctionWorkletsData.worklets.has(pid)) { + threadType = "AUCTION_WORKLET" /* ThreadType.AUCTION_WORKLET */; + } + else if (thread.name?.startsWith('ThreadPool')) { + // TODO(paulirish): perhaps exclude ThreadPoolServiceThread entirely + threadType = "THREAD_POOL" /* ThreadType.THREAD_POOL */; + } + return threadType; +} +export function threadsInRenderer(rendererData, 
auctionWorkletsData) { + const foundThreads = []; + // If we have Renderer threads, we prefer to use those. In the event that a + // trace is a CPU Profile trace, we will never have Renderer threads, so we + // know if there are no Renderer threads that we can fallback to using the + // data from the SamplesHandler. + if (rendererData.processes.size) { + for (const [pid, process] of rendererData.processes) { + for (const [tid, thread] of process.threads) { + if (!thread.tree) { + // Drop threads where we could not create the tree; this indicates + // unexpected data and we won't be able to support all the UI + // filtering we need. + continue; + } + const threadType = getThreadTypeForRendererThread(pid, thread, auctionWorkletsData); + foundThreads.push({ + name: thread.name, + pid, + tid, + processIsOnMainFrame: process.isOnMainFrame, + entries: thread.entries, + tree: thread.tree, + type: threadType, + entryToNode: rendererData.entryToNode, + }); + } + } + } + return foundThreads; +} +/** + * Given trace parsed data, this helper will return a high level array of + * ThreadData. This is useful because it allows you to get a list of threads + * regardless of if the trace is a CPU Profile or a Tracing profile. Thus you + * can use this helper to iterate over threads in confidence that it will work + * for both trace types. + */ +export function threadsInTrace(traceParseData) { + // If we have Renderer threads, we prefer to use those. In the event that a + // trace is a CPU Profile trace, we will never have Renderer threads, so we + // know if there are no Renderer threads that we can fallback to using the + // data from the SamplesHandler. 
+ const threadsFromRenderer = threadsInRenderer(traceParseData.Renderer, traceParseData.AuctionWorklets); + if (threadsFromRenderer.length) { + return threadsFromRenderer; + } + const foundThreads = []; + if (traceParseData.Samples.profilesInProcess.size) { + for (const [pid, process] of traceParseData.Samples.profilesInProcess) { + for (const [tid, thread] of process) { + if (!thread.profileTree) { + // Drop threads where we could not create the tree; this indicates + // unexpected data and we won't be able to support all the UI + // filtering we need. + continue; + } + foundThreads.push({ + pid, + tid, + // CPU Profile threads do not have a name. + name: null, + entries: thread.profileCalls, + // There is no concept of a "Main Frame" in a CPU profile. + processIsOnMainFrame: false, + tree: thread.profileTree, + type: "CPU_PROFILE" /* ThreadType.CPU_PROFILE */, + entryToNode: traceParseData.Samples.entryToNode, + }); + } + } + } + return foundThreads; +} +//# sourceMappingURL=Threads.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/UserInteractionsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/UserInteractionsHandler.js new file mode 100644 index 000000000..e91c995d1 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/UserInteractionsHandler.js @@ -0,0 +1,285 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as metaHandlerData } from './MetaHandler.js'; +// This handler serves two purposes. It generates a list of events that are +// used to show user clicks in the timeline. 
It is also used to gather +// EventTimings into Interactions, which we use to show interactions and +// highlight long interactions to the user, along with INP. +// We don't need to know which process / thread these events occurred in, +// because they are effectively global, so we just track all that we find. +const allEvents = []; +const beginCommitCompositorFrameEvents = []; +export const LONG_INTERACTION_THRESHOLD = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(200)); +let longestInteractionEvent = null; +const interactionEvents = []; +const interactionEventsWithNoNesting = []; +const eventTimingEndEventsById = new Map(); +const eventTimingStartEventsForInteractions = []; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +export function reset() { + allEvents.length = 0; + beginCommitCompositorFrameEvents.length = 0; + interactionEvents.length = 0; + eventTimingStartEventsForInteractions.length = 0; + eventTimingEndEventsById.clear(); + interactionEventsWithNoNesting.length = 0; + longestInteractionEvent = null; + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Handler is not initialized'); + } + if (Types.TraceEvents.isTraceEventBeginCommitCompositorFrame(event)) { + beginCommitCompositorFrameEvents.push(event); + return; + } + if (!Types.TraceEvents.isTraceEventEventTiming(event)) { + return; + } + if (Types.TraceEvents.isTraceEventEventTimingEnd(event)) { + // Store the end event; for each start event that is an interaction, we need the matching end event to calculate the duration correctly. + eventTimingEndEventsById.set(event.id, event); + } + allEvents.push(event); + // From this point on we want to find events that represent interactions. + // These events are always start events - those are the ones that contain all + // the metadata about the interaction. 
+ if (!event.args.data || !Types.TraceEvents.isTraceEventEventTimingStart(event)) { + return; + } + const { duration, interactionId } = event.args.data; + // We exclude events for the sake of interactions if: + // 1. They have no duration. + // 2. They have no interactionId + // 3. They have an interactionId of 0: this indicates that it's not an + // interaction that we care about because it hasn't had its own interactionId + // set (0 is the default on the backend). + // See: https://source.chromium.org/chromium/chromium/src/+/main:third_party/blink/renderer/core/timing/responsiveness_metrics.cc;l=133;drc=40c209a9c365ebb9f16fb99dfe78c7fe768b9594 + if (duration < 1 || interactionId === undefined || interactionId === 0) { + return; + } + // Store the start event. In the finalize() function we will pair this with + // its end event and create the synthetic interaction event. + eventTimingStartEventsForInteractions.push(event); +} +/** + * See https://web.dev/better-responsiveness-metric/#interaction-types for the + * table that defines these sets. + **/ +const pointerEventTypes = new Set([ + 'pointerdown', + 'touchstart', + 'pointerup', + 'touchend', + 'mousedown', + 'mouseup', + 'click', +]); +const keyboardEventTypes = new Set([ + 'keydown', + 'keypress', + 'keyup', +]); +export function categoryOfInteraction(interaction) { + if (pointerEventTypes.has(interaction.type)) { + return 'POINTER'; + } + if (keyboardEventTypes.has(interaction.type)) { + return 'KEYBOARD'; + } + return 'OTHER'; +} +/** + * We define a set of interactions as nested where: + * 1. Their end times align. + * 2. The longest interaction's start time is earlier than all other + * interactions with the same end time. + * 3. The interactions are of the same category [each interaction is either + * categorised as keyboard, or pointer.] 
+ * + * =============A=[pointerup]= + * ====B=[pointerdown]= + * ===C=[pointerdown]== + * ===D=[pointerup]=== + * + * In this example, B, C and D are all nested and therefore should not be + * returned from this function. + * + * However, in this example we would only consider B nested (under A) and D + * nested (under C). A and C both stay because they are of different types. + * ========A=[keydown]==== + * =======B=[keyup]===== + * ====C=[pointerdown]= + * =D=[pointerup]= + **/ +export function removeNestedInteractions(interactions) { + /** + * Because we nest events only that are in the same category, we store the + * longest event for a given end time by category. + **/ + const earliestEventForEndTimePerCategory = { + POINTER: new Map(), + KEYBOARD: new Map(), + OTHER: new Map(), + }; + function storeEventIfEarliestForCategoryAndEndTime(interaction) { + const category = categoryOfInteraction(interaction); + const earliestEventForEndTime = earliestEventForEndTimePerCategory[category]; + const endTime = Types.Timing.MicroSeconds(interaction.ts + interaction.dur); + const earliestCurrentEvent = earliestEventForEndTime.get(endTime); + if (!earliestCurrentEvent) { + earliestEventForEndTime.set(endTime, interaction); + return; + } + if (interaction.ts < earliestCurrentEvent.ts) { + earliestEventForEndTime.set(endTime, interaction); + } + else if (interaction.ts === earliestCurrentEvent.ts && + interaction.interactionId === earliestCurrentEvent.interactionId) { + // We have seen in traces that the same interaction can have multiple + // events (e.g. a 'click' and a 'pointerdown'). Often only one of these + // events will have an event handler bound to it which caused delay on + // the main thread, and the others will not. 
This leads to a situation + // where if we pick one of the events that had no event handler, its + // processing duration (processingEnd - processingStart) will be 0, but if we + // had picked the event that had the slow event handler, we would show + // correctly the main thread delay due to the event handler. + // So, if we find events with the same interactionId and the same + // begin/end times, we pick the one with the largest (processingEnd - + // processingStart) time in order to make sure we find the event with the + // worst main thread delay, as that is the one the user should care + // about. + const currentProcessingDuration = earliestCurrentEvent.processingEnd - earliestCurrentEvent.processingStart; + const newProcessingDuration = interaction.processingEnd - interaction.processingStart; + // Use the new interaction if it has a longer processing duration than the existing one. + if (newProcessingDuration > currentProcessingDuration) { + earliestEventForEndTime.set(endTime, interaction); + } + } + // Maximize the processing duration based on the "children" interactions. + // We pick the earliest start processing duration, and the latest end + // processing duration to avoid under-reporting. + if (interaction.processingStart < earliestCurrentEvent.processingStart) { + earliestCurrentEvent.processingStart = interaction.processingStart; + writeSyntheticTimespans(earliestCurrentEvent); + } + if (interaction.processingEnd > earliestCurrentEvent.processingEnd) { + earliestCurrentEvent.processingEnd = interaction.processingEnd; + writeSyntheticTimespans(earliestCurrentEvent); + } + } + for (const interaction of interactions) { + storeEventIfEarliestForCategoryAndEndTime(interaction); + } + // Combine all the events that we have kept from all the per-category event + // maps back into an array and sort them by timestamp. 
+ const keptEvents = Object.values(earliestEventForEndTimePerCategory) + .flatMap(eventsByEndTime => Array.from(eventsByEndTime.values())); + keptEvents.sort((eventA, eventB) => { + return eventA.ts - eventB.ts; + }); + return keptEvents; +} +function writeSyntheticTimespans(event) { + const startEvent = event.args.data.beginEvent; + const endEvent = event.args.data.endEvent; + event.inputDelay = Types.Timing.MicroSeconds(event.processingStart - startEvent.ts); + event.mainThreadHandling = Types.Timing.MicroSeconds(event.processingEnd - event.processingStart); + event.presentationDelay = Types.Timing.MicroSeconds(endEvent.ts - event.processingEnd); +} +export async function finalize() { + const { navigationsByFrameId } = metaHandlerData(); + // For each interaction start event, find the async end event by the ID, and then create the Synthetic Interaction event. + for (const interactionStartEvent of eventTimingStartEventsForInteractions) { + const endEvent = eventTimingEndEventsById.get(interactionStartEvent.id); + if (!endEvent) { + // If we cannot find an end event, bail and drop this event. + continue; + } + if (!interactionStartEvent.args.data?.type || !interactionStartEvent.args.data?.interactionId) { + // A valid interaction event that we care about has to have a type (e.g. + // pointerdown, keyup). + // + // We also need to ensure it has an interactionId. We already checked + // this in the handleEvent() function, but we do it here also to satisfy + // TypeScript. + continue; + } + // In the future we will add microsecond timestamps to the trace events, + // but until then we can use the millisecond precision values that are in + // the trace event. To adjust them to be relative to the event.ts and the + // trace timestamps, for both processingStart and processingEnd we subtract + // the event timestamp (NOT event.ts, but the timeStamp millisecond value + // emitted in args.data), and then add that value to the event.ts. 
This + // will give us a processingStart and processingEnd time in microseconds + // that is relative to event.ts, and can be used when drawing boxes. + // There is some inaccuracy here as we are converting milliseconds to microseconds, but it is good enough until the backend emits more accurate numbers. + const processingStartRelativeToTraceTime = Types.Timing.MicroSeconds(Helpers.Timing.millisecondsToMicroseconds(interactionStartEvent.args.data.processingStart) - + Helpers.Timing.millisecondsToMicroseconds(interactionStartEvent.args.data.timeStamp) + + interactionStartEvent.ts); + const processingEndRelativeToTraceTime = Types.Timing.MicroSeconds((Helpers.Timing.millisecondsToMicroseconds(interactionStartEvent.args.data.processingEnd) - + Helpers.Timing.millisecondsToMicroseconds(interactionStartEvent.args.data.timeStamp)) + + interactionStartEvent.ts); + const frameId = interactionStartEvent.args.frame ?? interactionStartEvent.args.data.frame; + const navigation = Helpers.Trace.getNavigationForTraceEvent(interactionStartEvent, frameId, navigationsByFrameId); + const navigationId = navigation?.args.data?.navigationId; + const interactionEvent = { + // Use the start event to define the common fields. 
+ rawSourceEvent: interactionStartEvent, + cat: interactionStartEvent.cat, + name: interactionStartEvent.name, + pid: interactionStartEvent.pid, + tid: interactionStartEvent.tid, + ph: interactionStartEvent.ph, + processingStart: processingStartRelativeToTraceTime, + processingEnd: processingEndRelativeToTraceTime, + // These will be set in writeSyntheticTimespans() + inputDelay: Types.Timing.MicroSeconds(-1), + mainThreadHandling: Types.Timing.MicroSeconds(-1), + presentationDelay: Types.Timing.MicroSeconds(-1), + args: { + data: { + beginEvent: interactionStartEvent, + endEvent: endEvent, + frame: frameId, + navigationId, + }, + }, + ts: interactionStartEvent.ts, + dur: Types.Timing.MicroSeconds(endEvent.ts - interactionStartEvent.ts), + type: interactionStartEvent.args.data.type, + interactionId: interactionStartEvent.args.data.interactionId, + }; + writeSyntheticTimespans(interactionEvent); + interactionEvents.push(interactionEvent); + } + handlerState = 3 /* HandlerState.FINALIZED */; + interactionEventsWithNoNesting.push(...removeNestedInteractions(interactionEvents)); + // Pick the longest interactions from the set that were not nested, as we + // know those are the set of the largest interactions. 
+ for (const interactionEvent of interactionEventsWithNoNesting) { + if (!longestInteractionEvent || longestInteractionEvent.dur < interactionEvent.dur) { + longestInteractionEvent = interactionEvent; + } + } +} +export function data() { + return { + allEvents, + beginCommitCompositorFrameEvents, + interactionEvents, + interactionEventsWithNoNesting, + longestInteractionEvent, + interactionsOverThreshold: new Set(interactionEvents.filter(event => { + return event.dur > LONG_INTERACTION_THRESHOLD; + })), + }; +} +export function deps() { + return ['Meta']; +} +//# sourceMappingURL=UserInteractionsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/UserTimingsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/UserTimingsHandler.js new file mode 100644 index 000000000..3a2322ae2 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/UserTimingsHandler.js @@ -0,0 +1,109 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +/** + * IMPORTANT! + * See UserTimings.md in this directory for some handy documentation on + * UserTimings and the trace events we parse currently. 
+ **/ +let syntheticEvents = []; +const performanceMeasureEvents = []; +const performanceMarkEvents = []; +const consoleTimings = []; +const timestampEvents = []; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +export function reset() { + syntheticEvents.length = 0; + performanceMeasureEvents.length = 0; + performanceMarkEvents.length = 0; + consoleTimings.length = 0; + timestampEvents.length = 0; + handlerState = 2 /* HandlerState.INITIALIZED */; +} +const resourceTimingNames = [ + 'workerStart', + 'redirectStart', + 'redirectEnd', + 'fetchStart', + 'domainLookupStart', + 'domainLookupEnd', + 'connectStart', + 'connectEnd', + 'secureConnectionStart', + 'requestStart', + 'responseStart', + 'responseEnd', +]; +const navTimingNames = [ + 'navigationStart', + 'unloadEventStart', + 'unloadEventEnd', + 'redirectStart', + 'redirectEnd', + 'fetchStart', + 'commitNavigationEnd', + 'domainLookupStart', + 'domainLookupEnd', + 'connectStart', + 'connectEnd', + 'secureConnectionStart', + 'requestStart', + 'responseStart', + 'responseEnd', + 'domLoading', + 'domInteractive', + 'domContentLoadedEventStart', + 'domContentLoadedEventEnd', + 'domComplete', + 'loadEventStart', + 'loadEventEnd', +]; +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('UserTimings handler is not initialized'); + } + // These are events dispatched under the blink.user_timing category + // but that the user didn't add. Filter them out so that they do not + // Appear in the timings track (they still appear in the main thread + // flame chart). 
+ const ignoredNames = [...resourceTimingNames, ...navTimingNames]; + if (ignoredNames.includes(event.name)) { + return; + } + if (Types.TraceEvents.isTraceEventPerformanceMeasure(event)) { + performanceMeasureEvents.push(event); + return; + } + if (Types.TraceEvents.isTraceEventPerformanceMark(event)) { + performanceMarkEvents.push(event); + } + if (Types.TraceEvents.isTraceEventConsoleTime(event)) { + consoleTimings.push(event); + } + if (Types.TraceEvents.isTraceEventTimeStamp(event)) { + timestampEvents.push(event); + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('UserTimings handler is not initialized'); + } + const asyncEvents = [...performanceMeasureEvents, ...consoleTimings]; + syntheticEvents = Helpers.Trace.createMatchedSortedSyntheticEvents(asyncEvents); + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('UserTimings handler is not finalized'); + } + return { + performanceMeasures: syntheticEvents.filter(e => e.cat === 'blink.user_timing'), + consoleTimings: syntheticEvents.filter(e => e.cat === 'blink.console'), + // TODO(crbug/41484172): UserTimingsHandler.test.ts fails if this is not copied. + performanceMarks: [...performanceMarkEvents], + timestampEvents: [...timestampEvents], + }; +} +//# sourceMappingURL=UserTimingsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/WarningsHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/WarningsHandler.js new file mode 100644 index 000000000..68806b7e5 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/WarningsHandler.js @@ -0,0 +1,124 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { data as userInteractionsHandlerData } from './UserInteractionsHandler.js'; +const warningsPerEvent = new Map(); +const eventsPerWarning = new Map(); +/** + * Tracks the stack formed by nested trace events up to a given point + */ +const allEventsStack = []; +/** + * Tracks the stack formed by JS invocation trace events up to a given point. + * F.e. FunctionCall, EvaluateScript, V8Execute. + * Not to be confused with ProfileCalls. + */ +const jsInvokeStack = []; +/** + * Tracks reflow events in a task. + */ +const taskReflowEvents = []; +export const FORCED_REFLOW_THRESHOLD = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(30)); +export const LONG_MAIN_THREAD_TASK_THRESHOLD = Helpers.Timing.millisecondsToMicroseconds(Types.Timing.MilliSeconds(50)); +export function reset() { + warningsPerEvent.clear(); + eventsPerWarning.clear(); + allEventsStack.length = 0; + jsInvokeStack.length = 0; + taskReflowEvents.length = 0; +} +function storeWarning(event, warning) { + const existingWarnings = Platform.MapUtilities.getWithDefault(warningsPerEvent, event, () => []); + existingWarnings.push(warning); + warningsPerEvent.set(event, existingWarnings); + const existingEvents = Platform.MapUtilities.getWithDefault(eventsPerWarning, warning, () => []); + existingEvents.push(event); + eventsPerWarning.set(warning, existingEvents); +} +export function handleEvent(event) { + processForcedReflowWarning(event); + if (event.name === "RunTask" /* Types.TraceEvents.KnownEventName.RunTask */) { + const { duration } = Helpers.Timing.eventTimingsMicroSeconds(event); + if (duration > LONG_MAIN_THREAD_TASK_THRESHOLD) { + storeWarning(event, 'LONG_TASK'); + } + return; + } + if (Types.TraceEvents.isTraceEventFireIdleCallback(event)) { + const { duration } = Helpers.Timing.eventTimingsMilliSeconds(event); + 
if (duration > event.args.data.allottedMilliseconds) { + storeWarning(event, 'IDLE_CALLBACK_OVER_TIME'); + } + return; + } +} +/** + * Reflows* are added a warning to if: + * 1. They are forced/sync, meaning they are invoked by JS and finish + * during the Script execution. + * 2. Their duration exceeds a threshold. + * - *Reflow: The style recalculation and layout steps in a render task. + */ +function processForcedReflowWarning(event) { + // Update the event and the JS invocation stacks. + accomodateEventInStack(event, allEventsStack); + accomodateEventInStack(event, jsInvokeStack, /* pushEventToStack */ Types.TraceEvents.isJSInvocationEvent(event)); + if (jsInvokeStack.length) { + // Current event falls inside a JS call. + if (event.name === "Layout" /* Types.TraceEvents.KnownEventName.Layout */ || + event.name === "UpdateLayoutTree" /* Types.TraceEvents.KnownEventName.UpdateLayoutTree */) { + // A forced reflow happened. However we need to check if + // the threshold is surpassed to add a warning. Accumulate the + // event to check for this after the current Task is over. + taskReflowEvents.push(event); + return; + } + } + if (allEventsStack.length === 1) { + // We hit a new task. Check if the forced reflows in the previous + // task exceeded the threshold and add a warning if so. + const totalTime = taskReflowEvents.reduce((time, event) => time + (event.dur || 0), 0); + if (totalTime >= FORCED_REFLOW_THRESHOLD) { + taskReflowEvents.forEach(reflowEvent => storeWarning(reflowEvent, 'FORCED_REFLOW')); + } + taskReflowEvents.length = 0; + } +} +/** + * Updates a given trace event stack given a new event. 
+ */ +function accomodateEventInStack(event, stack, pushEventToStack = true) { + let nextItem = stack.at(-1); + while (nextItem && event.ts > nextItem.ts + (nextItem.dur || 0)) { + stack.pop(); + nextItem = stack.at(-1); + } + if (!pushEventToStack) { + return; + } + stack.push(event); +} +export function deps() { + return ['UserInteractions']; +} +export async function finalize() { + // These events do exist on the UserInteractionsHandler, but we also put + // them into the WarningsHandler so that the warnings handler can be the + // source of truth and the way to look up all warnings for a given event. + // Otherwise, we would have to look up warnings across multiple handlers for + // a given event, which will start to get messy very quickly. + const longInteractions = userInteractionsHandlerData().interactionsOverThreshold; + for (const interaction of longInteractions) { + storeWarning(interaction, 'LONG_INTERACTION'); + } +} +export function data() { + return { + perEvent: warningsPerEvent, + perWarning: eventsPerWarning, + }; +} +//# sourceMappingURL=WarningsHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/WorkersHandler.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/WorkersHandler.js new file mode 100644 index 000000000..ab5156b9d --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/WorkersHandler.js @@ -0,0 +1,52 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Types from '../types/types.js'; +let handlerState = 1 /* HandlerState.UNINITIALIZED */; +const sessionIdEvents = []; +const workerIdByThread = new Map(); +const workerURLById = new Map(); +export function initialize() { + if (handlerState !== 1 /* HandlerState.UNINITIALIZED */) { + throw new Error('Workers Handler was not reset'); + } + handlerState = 2 /* HandlerState.INITIALIZED */; +} +export function reset() { + sessionIdEvents.length = 0; + workerIdByThread.clear(); + workerURLById.clear(); + handlerState = 1 /* HandlerState.UNINITIALIZED */; +} +export function handleEvent(event) { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Workers Handler is not initialized'); + } + if (Types.TraceEvents.isTraceEventTracingSessionIdForWorker(event)) { + sessionIdEvents.push(event); + } +} +export async function finalize() { + if (handlerState !== 2 /* HandlerState.INITIALIZED */) { + throw new Error('Handler is not initialized'); + } + for (const sessionIdEvent of sessionIdEvents) { + if (!sessionIdEvent.args.data) { + continue; + } + workerIdByThread.set(sessionIdEvent.args.data.workerThreadId, sessionIdEvent.args.data.workerId); + workerURLById.set(sessionIdEvent.args.data.workerId, sessionIdEvent.args.data.url); + } + handlerState = 3 /* HandlerState.FINALIZED */; +} +export function data() { + if (handlerState !== 3 /* HandlerState.FINALIZED */) { + throw new Error('Workers Handler is not finalized'); + } + return { + workerSessionIdEvents: sessionIdEvents, + workerIdByThread, + workerURLById, + }; +} +//# sourceMappingURL=WorkersHandler.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/handlers/bundle-tsconfig.json new file mode 100644 index 000000000..39ba96fbd --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/bundle-tsconfig.json @@ -0,0 +1 @@ 
+{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../../front_end/models/trace/handlers"},"files":["../../../../../../../front_end/models/trace/handlers/handlers.ts"],"references":[{"path":"./handlers-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/handlers/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..7ee1f2ec0 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/handlers", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/handlers/handlers.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./handlers-tsconfig.json" + } + ] +} \ No newline at end of file diff --git 
a/node_modules/@paulirish/trace_engine/models/trace/handlers/handlers-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/handlers/handlers-tsconfig.json new file mode 100644 index 000000000..9198c9ae7 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/handlers-tsconfig.json @@ -0,0 +1,82 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/handlers", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "handlers-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/handlers/AnimationHandler.ts", + "../../../../../../../front_end/models/trace/handlers/AuctionWorkletsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/ExtensionTraceDataHandler.ts", + "../../../../../../../front_end/models/trace/handlers/FramesHandler.ts", + "../../../../../../../front_end/models/trace/handlers/GPUHandler.ts", + "../../../../../../../front_end/models/trace/handlers/ImagePaintingHandler.ts", + "../../../../../../../front_end/models/trace/handlers/InitiatorsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/InvalidationsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/LargestImagePaintHandler.ts", + "../../../../../../../front_end/models/trace/handlers/LargestTextPaintHandler.ts", + "../../../../../../../front_end/models/trace/handlers/LayerTreeHandler.ts", + 
"../../../../../../../front_end/models/trace/handlers/LayoutShiftsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/MemoryHandler.ts", + "../../../../../../../front_end/models/trace/handlers/MetaHandler.ts", + "../../../../../../../front_end/models/trace/handlers/ModelHandlers.ts", + "../../../../../../../front_end/models/trace/handlers/NetworkRequestsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/PageFramesHandler.ts", + "../../../../../../../front_end/models/trace/handlers/PageLoadMetricsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/RendererHandler.ts", + "../../../../../../../front_end/models/trace/handlers/SamplesHandler.ts", + "../../../../../../../front_end/models/trace/handlers/ScreenshotsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/SelectorStatsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/Threads.ts", + "../../../../../../../front_end/models/trace/handlers/UserInteractionsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/UserTimingsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/WarningsHandler.ts", + "../../../../../../../front_end/models/trace/handlers/WorkersHandler.ts", + "../../../../../../../front_end/models/trace/handlers/types.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../../core/platform/bundle-tsconfig.json" + }, + { + "path": "../../../generated/generated-tsconfig.json" + }, + { + "path": "../../cpu_profile/bundle-tsconfig.json" + }, + { + "path": "../helpers/bundle-tsconfig.json" + }, + { + "path": "../types/bundle-tsconfig.json" + } + ] +} \ No newline at end of file diff --git 
a/node_modules/@paulirish/trace_engine/models/trace/handlers/handlers.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/handlers.js new file mode 100644 index 000000000..e1ffd863a --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/handlers.js @@ -0,0 +1,7 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export * as ModelHandlers from './ModelHandlers.js'; +export * as Threads from './Threads.js'; +export * as Types from './types.js'; +//# sourceMappingURL=handlers.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/handlers/types.js b/node_modules/@paulirish/trace_engine/models/trace/handlers/types.js new file mode 100644 index 000000000..59713adc6 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/handlers/types.js @@ -0,0 +1,18 @@ +import * as ModelHandlers from './ModelHandlers.js'; +/** + * Because you can run the trace engine with a subset of handlers enabled, + * there can be times when you need to confirm if the trace contains all + * handlers or not, because some parts of the engine expect to be given all + * the handlers. + */ +export function handlerDataHasAllHandlers(data) { + let isMissingHandler = false; + for (const handlerName of Object.keys(ModelHandlers)) { + if (handlerName in data === false) { + isMissingHandler = true; + break; + } + } + return !isMissingHandler; +} +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/Extensions.js b/node_modules/@paulirish/trace_engine/models/trace/helpers/Extensions.js new file mode 100644 index 000000000..44c33fb9d --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/Extensions.js @@ -0,0 +1,26 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../../core/platform/platform.js'; +import { sortTraceEventsInPlace } from './Trace.js'; +import { canBuildTreesFromEvents, treify } from './TreeHelpers.js'; +export function buildTrackDataFromExtensionEntries(extensionEntries, extensionTrackData) { + const dataByTrack = new Map(); + for (const entry of extensionEntries) { + const trackData = Platform.MapUtilities.getWithDefault(dataByTrack, `${entry.args.metadata.extensionName}.${entry.args.track}`, () => ({ + name: entry.args.track, + extensionName: entry.args.metadata.extensionName, + flameChartEntries: [], + })); + trackData.flameChartEntries.push(entry); + } + for (const trackData of dataByTrack.values()) { + sortTraceEventsInPlace(trackData.flameChartEntries); + if (canBuildTreesFromEvents(trackData.flameChartEntries)) { + treify(trackData.flameChartEntries); + } + extensionTrackData.push(trackData); + } + return extensionTrackData; +} +//# sourceMappingURL=Extensions.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/SamplesIntegrator.js b/node_modules/@paulirish/trace_engine/models/trace/helpers/SamplesIntegrator.js new file mode 100644 index 000000000..f565e5c40 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/SamplesIntegrator.js @@ -0,0 +1,425 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +import { millisecondsToMicroseconds } from './Timing.js'; +import { makeProfileCall, mergeEventsInOrder } from './Trace.js'; +/** + * This is a helper that integrates CPU profiling data coming in the + * shape of samples, with trace events. 
Samples indicate what the JS + * stack trace looked at a given point in time, but they don't have + * duration. The SamplesIntegrator task is to make an approximation + * of what the duration of each JS call was, given the sample data and + * given the trace events profiled during that time. At the end of its + * execution, the SamplesIntegrator returns an array of ProfileCalls + * (under SamplesIntegrator::buildProfileCalls()), which + * represent JS calls, with a call frame and duration. These calls have + * the shape of a complete trace events and can be treated as flame + * chart entries in the timeline. + * + * The approach to build the profile calls consists in tracking the + * current stack as the following events happen (in order): + * 1. A sample was done. + * 2. A trace event started. + * 3. A trace event ended. + * Depending on the event and on the data that's coming with it the + * stack is updated by adding or removing JS calls to it and updating + * the duration of the calls in the tracking stack. + * + * note: Although this approach has been implemented since long ago, and + * is relatively efficent (adds a complexity over the trace parsing of + * O(n) where n is the number of samples) it has proven to be faulty. + * It might be worthwhile experimenting with improvements or with a + * completely different approach. Improving the approach is tracked in + * crbug.com/1417439 + */ +export class SamplesIntegrator { + /** + * The result of runing the samples integrator. Holds the JS calls + * with their approximated duration after integrating samples into the + * trace event tree. + */ + #constructedProfileCalls = []; + /** + * tracks the state of the JS stack at each point in time to update + * the profile call durations as new events arrive. 
This doesn't only + * happen with new profile calls (in which case we would compare the + * stack in them) but also with trace events (in which case we would + * update the duration of the events we are tracking at the moment). + */ + #currentJSStack = []; + /** + * Process holding the CPU profile and trace events. + */ + #processId; + /** + * Thread holding the CPU profile and trace events. + */ + #threadId; + /** + * Tracks the depth of the JS stack at the moment a trace event starts + * or ends. It is assumed that for the duration of a trace event, the + * JS stack's depth cannot decrease, since JS calls that started + * before a trace event cannot end during the trace event. So as trace + * events arrive, we store the "locked" amount of JS frames that were + * in the stack before the event came. + */ + #lockedJsStackDepth = []; + /** + * Used to keep track when samples should be integrated even if they + * are not children of invocation trace events. This is useful in + * cases where we can be missing the start of JS invocation events if + * we start tracing half-way through. + */ + #fakeJSInvocation = false; + /** + * The parsed CPU profile, holding the tree hierarchy of JS frames and + * the sample data. + */ + #profileModel; + /** + * Because GC nodes don't have a stack, we artificially add a stack to + * them which corresponds to that of the previous sample. This map + * tracks which node is used for the stack of a GC call. + * Note that GC samples are not shown in the flamechart, however they + * are used during the construction of for profile calls, as we can + * infer information about the duration of the executed code when a + * GC node is sampled. + */ + #nodeForGC = new Map(); + #engineConfig; + #profileId; + /** + * Keeps track of the individual samples from the CPU Profile. + * Only used with Debug Mode experiment enabled. 
+ */ + jsSampleEvents = []; + constructor(profileModel, profileId, pid, tid, configuration) { + this.#profileModel = profileModel; + this.#threadId = tid; + this.#processId = pid; + this.#engineConfig = configuration || Types.Configuration.defaults(); + this.#profileId = profileId; + } + buildProfileCalls(traceEvents) { + const mergedEvents = mergeEventsInOrder(traceEvents, this.callsFromProfileSamples()); + const stack = []; + for (let i = 0; i < mergedEvents.length; i++) { + const event = mergedEvents[i]; + // Because instant trace events have no duration, they don't provide + // useful information for possible changes in the duration of calls + // in the JS stack. + if (event.ph === "I" /* Types.TraceEvents.Phase.INSTANT */) { + continue; + } + if (stack.length === 0) { + if (Types.TraceEvents.isProfileCall(event)) { + this.#onProfileCall(event); + continue; + } + stack.push(event); + this.#onTraceEventStart(event); + continue; + } + const parentEvent = stack.at(-1); + if (parentEvent === undefined) { + continue; + } + const begin = event.ts; + const parentBegin = parentEvent.ts; + const parentDuration = parentEvent.dur || 0; + const parentEnd = parentBegin + parentDuration; + const startsAfterParent = begin >= parentEnd; + if (startsAfterParent) { + this.#onTraceEventEnd(parentEvent); + stack.pop(); + i--; + continue; + } + if (Types.TraceEvents.isProfileCall(event)) { + this.#onProfileCall(event, parentEvent); + continue; + } + this.#onTraceEventStart(event); + stack.push(event); + } + while (stack.length) { + const last = stack.pop(); + if (last) { + this.#onTraceEventEnd(last); + } + } + return this.#constructedProfileCalls; + } + #onTraceEventStart(event) { + // Top level events cannot be nested into JS frames so we reset + // the stack when we find one. 
+ if (event.name === "RunMicrotasks" /* Types.TraceEvents.KnownEventName.RunMicrotasks */ || + event.name === "RunTask" /* Types.TraceEvents.KnownEventName.RunTask */) { + this.#lockedJsStackDepth = []; + this.#truncateJSStack(0, event.ts); + this.#fakeJSInvocation = false; + } + if (this.#fakeJSInvocation) { + this.#truncateJSStack(this.#lockedJsStackDepth.pop() || 0, event.ts); + this.#fakeJSInvocation = false; + } + this.#extractStackTrace(event); + // Keep track of the call frames in the stack before the event + // happened. For the duration of this event, these frames cannot + // change (none can be terminated before this event finishes). + // + // Also, every frame that is opened after this event, is considered + // to be a descendant of the event. So once the event finishes, the + // frames that were opened after it, need to be closed (see + // onEndEvent). + // + // TODO(crbug.com/1417439): + // The assumption that every frame opened after an event is a + // descendant of the event is incorrect. For example, a JS call that + // parents a trace event might have been sampled after the event was + // dispatched. In this case the JS call would be discarded if this + // event isn't an invocation event, otherwise the call will be + // considered a child of the event. In both cases, the result would + // be incorrect. + this.#lockedJsStackDepth.push(this.#currentJSStack.length); + } + #onProfileCall(event, parent) { + if ((parent && Types.TraceEvents.isJSInvocationEvent(parent)) || this.#fakeJSInvocation) { + this.#extractStackTrace(event); + } + else if (Types.TraceEvents.isProfileCall(event) && this.#currentJSStack.length === 0) { + // Force JS Samples to show up even if we are not inside a JS + // invocation event, because we can be missing the start of JS + // invocation events if we start tracing half-way through. Pretend + // we have a top-level JS invocation event. 
+ this.#fakeJSInvocation = true; + const stackDepthBefore = this.#currentJSStack.length; + this.#extractStackTrace(event); + this.#lockedJsStackDepth.push(stackDepthBefore); + } + } + #onTraceEventEnd(event) { + // Because the event has ended, any frames that happened after + // this event are terminated. Frames that are ancestors to this + // event are extended to cover its ending. + const endTime = Types.Timing.MicroSeconds(event.ts + (event.dur || 0)); + this.#truncateJSStack(this.#lockedJsStackDepth.pop() || 0, endTime); + } + /** + * Builds the initial calls with no duration from samples. Their + * purpose is to be merged with the trace event array being parsed so + * that they can be traversed in order with them and their duration + * can be updated as the SampleIntegrator callbacks are invoked. + */ + callsFromProfileSamples() { + const samples = this.#profileModel.samples; + const timestamps = this.#profileModel.timestamps; + const debugModeEnabled = this.#engineConfig.debugMode; + if (!samples) { + return []; + } + const calls = []; + let prevNode; + for (let i = 0; i < samples.length; i++) { + const node = this.#profileModel.nodeByIndex(i); + const timestamp = millisecondsToMicroseconds(Types.Timing.MilliSeconds(timestamps[i])); + if (!node) { + continue; + } + const call = makeProfileCall(node, this.#profileId, i, timestamp, this.#processId, this.#threadId); + calls.push(call); + if (debugModeEnabled) { + this.jsSampleEvents.push(this.#makeJSSampleEvent(call, timestamp)); + } + if (node.id === this.#profileModel.gcNode?.id && prevNode) { + // GC samples have no stack, so we just put GC node on top of the + // last recorded sample. Cache the previous sample for future + // reference. 
+ this.#nodeForGC.set(call, prevNode); + continue; + } + prevNode = node; + } + return calls; + } + #makeProfileCallsForStack(profileCall) { + let node = this.#profileModel.nodeById(profileCall.nodeId); + const isGarbageCollection = node?.id === this.#profileModel.gcNode?.id; + if (isGarbageCollection) { + // Because GC don't have a stack, we use the stack of the previous + // sample. + node = this.#nodeForGC.get(profileCall) || null; + } + if (!node) { + return []; + } + // `node.depth` is 0 based, so to set the size of the array we need + // to add 1 to its value. + const callFrames = new Array(node.depth + 1 + Number(isGarbageCollection)); + // Add the stack trace in reverse order (bottom first). + let i = callFrames.length - 1; + if (isGarbageCollection) { + // Place the garbage collection call frame on top of the stack. + callFrames[i--] = profileCall; + } + // Many of these ProfileCalls will be GC'd later when we estimate the frame + // durations + while (node) { + callFrames[i--] = makeProfileCall(node, profileCall.profileId, profileCall.sampleIndex, profileCall.ts, this.#processId, this.#threadId); + node = node.parent; + } + return callFrames; + } + /** + * Update tracked stack using this event's call stack. + */ + #extractStackTrace(event) { + const stackTrace = Types.TraceEvents.isProfileCall(event) ? this.#makeProfileCallsForStack(event) : this.#currentJSStack; + SamplesIntegrator.filterStackFrames(stackTrace, this.#engineConfig); + const endTime = event.ts + (event.dur || 0); + const minFrames = Math.min(stackTrace.length, this.#currentJSStack.length); + let i; + // Merge a sample's stack frames with the stack frames we have + // so far if we detect they are equivalent. 
+ // Graphically + // This: + // Current stack trace Sample + // [-------A------] [A] + // [-------B------] [B] + // [-------C------] [C] + // ^ t = x1 ^ t = x2 + // Becomes this: + // New stack trace after merge + // [--------A-------] + // [--------B-------] + // [--------C-------] + // ^ t = x2 + for (i = this.#lockedJsStackDepth.at(-1) || 0; i < minFrames; ++i) { + const newFrame = stackTrace[i].callFrame; + const oldFrame = this.#currentJSStack[i].callFrame; + if (!SamplesIntegrator.framesAreEqual(newFrame, oldFrame)) { + break; + } + // Scoot the right edge of this callFrame to the right + this.#currentJSStack[i].dur = + Types.Timing.MicroSeconds(Math.max(this.#currentJSStack[i].dur || 0, endTime - this.#currentJSStack[i].ts)); + } + // If there are call frames in the sample that differ with the stack + // we have, update the stack, but keeping the common frames in place + // Graphically + // This: + // Current stack trace Sample + // [-------A------] [A] + // [-------B------] [B] + // [-------C------] [C] + // [-------D------] [E] + // ^ t = x1 ^ t = x2 + // Becomes this: + // New stack trace after merge + // [--------A-------] + // [--------B-------] + // [--------C-------] + // [E] + // ^ t = x2 + this.#truncateJSStack(i, event.ts); + for (; i < stackTrace.length; ++i) { + const call = stackTrace[i]; + if (call.nodeId === this.#profileModel.programNode?.id || call.nodeId === this.#profileModel.root?.id || + call.nodeId === this.#profileModel.idleNode?.id || call.nodeId === this.#profileModel.gcNode?.id) { + // Skip (root), (program) and (idle) frames, since this are not + // relevant for web profiling and we don't want to show them in + // the timeline. 
+ continue; + } + this.#currentJSStack.push(call); + this.#constructedProfileCalls.push(call); + } + } + /** + * When a call stack that differs from the one we are tracking has + * been detected in the samples, the latter is "truncated" by + * setting the ending time of its call frames and removing the top + * call frames that aren't shared with the new call stack. This way, + * we can update the tracked stack with the new call frames on top. + * @param depth the amount of call frames from bottom to top that + * should be kept in the tracking stack trace. AKA amount of shared + * call frames between two stacks. + * @param time the new end of the call frames in the stack. + */ + #truncateJSStack(depth, time) { + if (this.#lockedJsStackDepth.length) { + const lockedDepth = this.#lockedJsStackDepth.at(-1); + if (lockedDepth && depth < lockedDepth) { + console.error(`Child stack is shallower (${depth}) than the parent stack (${lockedDepth}) at ${time}`); + depth = lockedDepth; + } + } + if (this.#currentJSStack.length < depth) { + console.error(`Trying to truncate higher than the current stack size at ${time}`); + depth = this.#currentJSStack.length; + } + for (let k = 0; k < this.#currentJSStack.length; ++k) { + this.#currentJSStack[k].dur = Types.Timing.MicroSeconds(Math.max(time - this.#currentJSStack[k].ts, 0)); + } + this.#currentJSStack.length = depth; + } + #makeJSSampleEvent(call, timestamp) { + const JSSampleEvent = { + name: "JSSample" /* Types.TraceEvents.KnownEventName.JSSample */, + cat: 'devtools.timeline', + args: { + data: { stackTrace: this.#makeProfileCallsForStack(call).map(e => e.callFrame) }, + }, + ph: "I" /* Types.TraceEvents.Phase.INSTANT */, + ts: timestamp, + dur: Types.Timing.MicroSeconds(0), + pid: this.#processId, + tid: this.#threadId, + }; + return JSSampleEvent; + } + static framesAreEqual(frame1, frame2) { + return frame1.scriptId === frame2.scriptId && frame1.functionName === frame2.functionName && + frame1.lineNumber === 
frame2.lineNumber; + } + static showNativeName(name, runtimeCallStatsEnabled) { + return runtimeCallStatsEnabled && Boolean(SamplesIntegrator.nativeGroup(name)); + } + static nativeGroup(nativeName) { + if (nativeName.startsWith('Parse')) { + return 'Parse'; + } + if (nativeName.startsWith('Compile') || nativeName.startsWith('Recompile')) { + return 'Compile'; + } + return null; + } + static isNativeRuntimeFrame(frame) { + return frame.url === 'native V8Runtime'; + } + static filterStackFrames(stack, engineConfig) { + const showAllEvents = engineConfig.showAllEvents; + if (showAllEvents) { + return; + } + let previousNativeFrameName = null; + let j = 0; + for (let i = 0; i < stack.length; ++i) { + const frame = stack[i].callFrame; + const nativeRuntimeFrame = SamplesIntegrator.isNativeRuntimeFrame(frame); + if (nativeRuntimeFrame && + !SamplesIntegrator.showNativeName(frame.functionName, engineConfig.includeRuntimeCallStats)) { + continue; + } + const nativeFrameName = nativeRuntimeFrame ? SamplesIntegrator.nativeGroup(frame.functionName) : null; + if (previousNativeFrameName && previousNativeFrameName === nativeFrameName) { + continue; + } + previousNativeFrameName = nativeFrameName; + stack[j++] = stack[i]; + } + stack.length = j; + } +} +//# sourceMappingURL=SamplesIntegrator.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/Timing.js b/node_modules/@paulirish/trace_engine/models/trace/helpers/Timing.js new file mode 100644 index 000000000..35de4441e --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/Timing.js @@ -0,0 +1,162 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Platform from '../../../core/platform/platform.js'; +import * as Types from '../types/types.js'; +import { getNavigationForTraceEvent } from './Trace.js'; +export const millisecondsToMicroseconds = (value) => Types.Timing.MicroSeconds(value * 1000); +export const secondsToMilliseconds = (value) => Types.Timing.MilliSeconds(value * 1000); +export const secondsToMicroseconds = (value) => millisecondsToMicroseconds(secondsToMilliseconds(value)); +export const microSecondsToMilliseconds = (value) => Types.Timing.MilliSeconds(value / 1000); +export const microSecondsToSeconds = (value) => Types.Timing.Seconds(value / 1000 / 1000); +export function detectBestTimeUnit(timeInMicroseconds) { + if (timeInMicroseconds < 1000) { + return 0 /* Types.Timing.TimeUnit.MICROSECONDS */; + } + const timeInMilliseconds = timeInMicroseconds / 1000; + if (timeInMilliseconds < 1000) { + return 1 /* Types.Timing.TimeUnit.MILLISECONDS */; + } + const timeInSeconds = timeInMilliseconds / 1000; + if (timeInSeconds < 60) { + return 2 /* Types.Timing.TimeUnit.SECONDS */; + } + return 3 /* Types.Timing.TimeUnit.MINUTES */; +} +const defaultFormatOptions = { + style: 'unit', + unit: 'millisecond', + unitDisplay: 'narrow', +}; +// Create a bunch of common formatters up front, so that we're not creating +// them repeatedly during rendering. +const serialize = (value) => JSON.stringify(value); +const formatterFactory = (key) => { + // If we pass undefined as the locale, that achieves two things: + // 1. Avoids us referencing window.navigatior to fetch the locale, which is + // useful given long term we would like this engine to run in NodeJS + // environments. + // 2. Will cause the formatter to fallback to the locale of the system, which + // is likely going to be the most accurate one to use anyway. + return new Intl.NumberFormat(undefined, key ? JSON.parse(key) : {}); +}; +const formatters = new Map(); +// Microsecond Formatter. 
+Platform.MapUtilities.getWithDefault(formatters, serialize({ style: 'decimal' }), formatterFactory); +// Millisecond Formatter +Platform.MapUtilities.getWithDefault(formatters, serialize(defaultFormatOptions), formatterFactory); +// Second Formatter +Platform.MapUtilities.getWithDefault(formatters, serialize({ ...defaultFormatOptions, unit: 'second' }), formatterFactory); +// Minute Formatter +Platform.MapUtilities.getWithDefault(formatters, serialize({ ...defaultFormatOptions, unit: 'minute' }), formatterFactory); +export function formatMicrosecondsTime(timeInMicroseconds, opts = {}) { + if (!opts.format) { + opts.format = detectBestTimeUnit(timeInMicroseconds); + } + const timeInMilliseconds = timeInMicroseconds / 1000; + const timeInSeconds = timeInMilliseconds / 1000; + const formatterOpts = { ...defaultFormatOptions, ...opts }; + switch (opts.format) { + case 0 /* Types.Timing.TimeUnit.MICROSECONDS */: { + const formatter = Platform.MapUtilities.getWithDefault(formatters, serialize({ style: 'decimal' }), formatterFactory); + return `${formatter.format(timeInMicroseconds)}μs`; + } + case 1 /* Types.Timing.TimeUnit.MILLISECONDS */: { + const formatter = Platform.MapUtilities.getWithDefault(formatters, serialize(formatterOpts), formatterFactory); + return formatter.format(timeInMilliseconds); + } + case 2 /* Types.Timing.TimeUnit.SECONDS */: { + const formatter = Platform.MapUtilities.getWithDefault(formatters, serialize({ ...formatterOpts, unit: 'second' }), formatterFactory); + return formatter.format(timeInSeconds); + } + default: { + // Switch to mins & seconds. 
+ const minuteFormatter = Platform.MapUtilities.getWithDefault(formatters, serialize({ ...formatterOpts, unit: 'minute' }), formatterFactory); + const secondFormatter = Platform.MapUtilities.getWithDefault(formatters, serialize({ ...formatterOpts, unit: 'second' }), formatterFactory); + const timeInMinutes = timeInSeconds / 60; + const [mins, divider, fraction] = minuteFormatter.formatToParts(timeInMinutes); + let seconds = 0; + if (divider && fraction) { + // Convert the fraction value (a string) to the nearest second. + seconds = Math.round(Number(`0.${fraction.value}`) * 60); + } + return `${minuteFormatter.format(Number(mins.value))} ${secondFormatter.format(seconds)}`; + } + } +} +export function timeStampForEventAdjustedByClosestNavigation(event, traceBounds, navigationsByNavigationId, navigationsByFrameId) { + let eventTimeStamp = event.ts - traceBounds.min; + if (event.args?.data?.navigationId) { + const navigationForEvent = navigationsByNavigationId.get(event.args.data.navigationId); + if (navigationForEvent) { + eventTimeStamp = event.ts - navigationForEvent.ts; + } + } + else if (event.args?.data?.frame) { + const navigationForEvent = getNavigationForTraceEvent(event, event.args.data.frame, navigationsByFrameId); + if (navigationForEvent) { + eventTimeStamp = event.ts - navigationForEvent.ts; + } + } + return Types.Timing.MicroSeconds(eventTimeStamp); +} +export function eventTimingsMicroSeconds(event) { + return { + startTime: event.ts, + endTime: Types.Timing.MicroSeconds(event.ts + (event.dur || Types.Timing.MicroSeconds(0))), + duration: Types.Timing.MicroSeconds(event.dur || 0), + // TODO(crbug.com/1434599): Implement selfTime calculation for events + // from the new engine. + selfTime: Types.TraceEvents.isSyntheticTraceEntry(event) ? 
Types.Timing.MicroSeconds(event.selfTime || 0) : + Types.Timing.MicroSeconds(event.dur || 0), + }; +} +export function eventTimingsMilliSeconds(event) { + const microTimes = eventTimingsMicroSeconds(event); + return { + startTime: microSecondsToMilliseconds(microTimes.startTime), + endTime: microSecondsToMilliseconds(microTimes.endTime), + duration: microSecondsToMilliseconds(microTimes.duration), + selfTime: microSecondsToMilliseconds(microTimes.selfTime), + }; +} +export function eventTimingsSeconds(event) { + const microTimes = eventTimingsMicroSeconds(event); + return { + startTime: microSecondsToSeconds(microTimes.startTime), + endTime: microSecondsToSeconds(microTimes.endTime), + duration: microSecondsToSeconds(microTimes.duration), + selfTime: microSecondsToSeconds(microTimes.selfTime), + }; +} +export function traceWindowMilliSeconds(bounds) { + return { + min: microSecondsToMilliseconds(bounds.min), + max: microSecondsToMilliseconds(bounds.max), + range: microSecondsToMilliseconds(bounds.range), + }; +} +export function traceWindowMillisecondsToMicroSeconds(bounds) { + return { + min: millisecondsToMicroseconds(bounds.min), + max: millisecondsToMicroseconds(bounds.max), + range: millisecondsToMicroseconds(bounds.range), + }; +} +export function traceWindowFromMilliSeconds(min, max) { + const traceWindow = { + min: millisecondsToMicroseconds(min), + max: millisecondsToMicroseconds(max), + range: Types.Timing.MicroSeconds(millisecondsToMicroseconds(max) - millisecondsToMicroseconds(min)), + }; + return traceWindow; +} +export function traceWindowFromMicroSeconds(min, max) { + const traceWindow = { + min, + max, + range: Types.Timing.MicroSeconds(max - min), + }; + return traceWindow; +} +//# sourceMappingURL=Timing.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/Trace.js b/node_modules/@paulirish/trace_engine/models/trace/helpers/Trace.js new file mode 100644 index 000000000..988653428 --- /dev/null 
+++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/Trace.js @@ -0,0 +1,511 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Platform from '../../../core/platform/platform.js'; +import * as Types from '../types/types.js'; +import { eventTimingsMicroSeconds } from './Timing.js'; +/** + * Extracts the raw stack trace of known trace events. Most likely than + * not you want to use `getZeroIndexedStackTraceForEvent`, which returns + * the stack with zero based numbering. Since some trace events are + * one based this function can yield unexpected results when used + * indiscriminately. + */ +function stackTraceForEvent(event) { + if (Types.TraceEvents.isSyntheticInvalidation(event)) { + return event.stackTrace || null; + } + if (event.args?.data?.stackTrace) { + return event.args.data.stackTrace; + } + if (Types.TraceEvents.isTraceEventUpdateLayoutTree(event)) { + return event.args.beginData?.stackTrace || null; + } + return null; +} +export function extractOriginFromTrace(firstNavigationURL) { + const url = new URL(firstNavigationURL); + if (url) { + // We do this to save some space in the toolbar - seeing the `www` is less + // useful than seeing `foo.com` if it's truncated at narrow widths + if (url.host.startsWith('www.')) { + return url.host.slice(4); + } + return url.host; + } + return null; +} +// Each thread contains events. Events indicate the thread and process IDs, which are +// used to store the event in the correct process thread entry below. 
+export function addEventToProcessThread(event, eventsInProcessThread) { + const { tid, pid } = event; + let eventsInThread = eventsInProcessThread.get(pid); + if (!eventsInThread) { + eventsInThread = new Map(); + } + let events = eventsInThread.get(tid); + if (!events) { + events = []; + } + events.push(event); + eventsInThread.set(event.tid, events); + eventsInProcessThread.set(event.pid, eventsInThread); +} +export function eventTimeComparator(a, b) { + const aBeginTime = a.ts; + const bBeginTime = b.ts; + if (aBeginTime < bBeginTime) { + return -1; + } + if (aBeginTime > bBeginTime) { + return 1; + } + const aDuration = a.dur ?? 0; + const bDuration = b.dur ?? 0; + const aEndTime = aBeginTime + aDuration; + const bEndTime = bBeginTime + bDuration; + if (aEndTime > bEndTime) { + return -1; + } + if (aEndTime < bEndTime) { + return 1; + } + return 0; +} +/** + * Sorts all the events in place, in order, by their start time. If they have + * the same start time, orders them by longest first. + */ +export function sortTraceEventsInPlace(events) { + events.sort(eventTimeComparator); +} +/** + * Returns an array of ordered events that results after merging the two + * ordered input arrays. 
+ */ +export function mergeEventsInOrder(eventsArray1, eventsArray2) { + const result = []; + let i = 0; + let j = 0; + while (i < eventsArray1.length && j < eventsArray2.length) { + const event1 = eventsArray1[i]; + const event2 = eventsArray2[j]; + const compareValue = eventTimeComparator(event1, event2); + if (compareValue <= 0) { + result.push(event1); + i++; + } + if (compareValue === 1) { + result.push(event2); + j++; + } + } + while (i < eventsArray1.length) { + result.push(eventsArray1[i++]); + } + while (j < eventsArray2.length) { + result.push(eventsArray2[j++]); + } + return result; +} +export function getNavigationForTraceEvent(event, eventFrameId, navigationsByFrameId) { + const navigations = navigationsByFrameId.get(eventFrameId); + if (!navigations || eventFrameId === '') { + // This event's navigation has been filtered out by the meta handler as a noise event + // or contains an empty frameId. + return null; + } + const eventNavigationIndex = Platform.ArrayUtilities.nearestIndexFromEnd(navigations, navigation => navigation.ts <= event.ts); + if (eventNavigationIndex === null) { + // This event's navigation has been filtered out by the meta handler as a noise event. + return null; + } + return navigations[eventNavigationIndex]; +} +export function extractId(event) { + return event.id ?? event.id2?.global ?? event.id2?.local; +} +export function activeURLForFrameAtTime(frameId, time, rendererProcessesByFrame) { + const processData = rendererProcessesByFrame.get(frameId); + if (!processData) { + return null; + } + for (const processes of processData.values()) { + for (const processInfo of processes) { + if (processInfo.window.min > time || processInfo.window.max < time) { + continue; + } + return processInfo.frame.url; + } + } + return null; +} +/** + * @param node the node attached to the profile call. Here a node represents a function in the call tree. + * @param profileId the profile ID that the sample came from that backs this call. 
+ * @param sampleIndex the index of the sample in the given profile that this call was created from + * @param ts the timestamp of the profile call + * @param pid the process ID of the profile call + * @param tid the thread ID of the profile call + * + * See `panels/timeline/docs/profile_calls.md` for more context on how these events are created. + */ +export function makeProfileCall(node, profileId, sampleIndex, ts, pid, tid) { + return { + cat: '', + name: 'ProfileCall', + nodeId: node.id, + args: {}, + ph: "X" /* Types.TraceEvents.Phase.COMPLETE */, + pid, + tid, + ts, + dur: Types.Timing.MicroSeconds(0), + selfTime: Types.Timing.MicroSeconds(0), + callFrame: node.callFrame, + sampleIndex, + profileId, + }; +} +export function makeSyntheticTraceEntry(name, ts, pid, tid) { + return { + cat: '', + name, + args: {}, + ph: "X" /* Types.TraceEvents.Phase.COMPLETE */, + pid, + tid, + ts, + dur: Types.Timing.MicroSeconds(0), + selfTime: Types.Timing.MicroSeconds(0), + }; +} +/** + * Matches beginning events with TraceEventPairableAsyncEnd and TraceEventPairableAsyncInstant (ASYNC_NESTABLE_INSTANT) + * if provided, though currently only coming from Animations. Traces may contain multiple instant events so we need to + * account for that. + * + * @returns {Map} Map of the animation's ID to it's matching events. + */ +export function matchEvents(unpairedEvents) { + // map to store begin and end of the event + const matchedPairs = new Map(); + // looking for start and end + for (const event of unpairedEvents) { + const syntheticId = getSyntheticId(event); + if (syntheticId === undefined) { + continue; + } + // Create a synthetic id to prevent collisions across categories. + // Console timings can be dispatched with the same id, so use the + // event name as well to generate unique ids. 
+ const otherEventsWithID = Platform.MapUtilities.getWithDefault(matchedPairs, syntheticId, () => { + return { begin: null, end: null, instant: [] }; + }); + const isStartEvent = event.ph === "b" /* Types.TraceEvents.Phase.ASYNC_NESTABLE_START */; + const isEndEvent = event.ph === "e" /* Types.TraceEvents.Phase.ASYNC_NESTABLE_END */; + const isInstantEvent = event.ph === "n" /* Types.TraceEvents.Phase.ASYNC_NESTABLE_INSTANT */; + if (isStartEvent) { + otherEventsWithID.begin = event; + } + else if (isEndEvent) { + otherEventsWithID.end = event; + } + else if (isInstantEvent) { + if (!otherEventsWithID.instant) { + otherEventsWithID.instant = []; + } + otherEventsWithID.instant.push(event); + } + } + return matchedPairs; +} +function getSyntheticId(event) { + const id = extractId(event); + return id && `${event.cat}:${id}:${event.name}`; +} +export function createSortedSyntheticEvents(matchedPairs, syntheticEventCallback) { + const syntheticEvents = []; + for (const [id, eventsTriplet] of matchedPairs.entries()) { + const beginEvent = eventsTriplet.begin; + const endEvent = eventsTriplet.end; + const instantEvents = eventsTriplet.instant; + if (!beginEvent || !(endEvent || instantEvents)) { + // This should never happen, the backend only creates the events once it + // has them both (beginEvent & endEvent/instantEvents), so we should never get into this state. + // If we do, something is very wrong, so let's just drop that problematic event. + continue; + } + const triplet = { beginEvent, endEvent, instantEvents }; + /** + * When trying to pair events with instant events present, there are times when these + * ASYNC_NESTABLE_INSTANT ('n') don't have a corresponding ASYNC_NESTABLE_END ('e') event. + * In these cases, pair without needing the endEvent. + */ + function eventsArePairable(data) { + const instantEventsMatch = data.instantEvents ? data.instantEvents.some(e => id === getSyntheticId(e)) : false; + const endEventMatch = data.endEvent ? 
id === getSyntheticId(data.endEvent) : false; + return Boolean(id) && (instantEventsMatch || endEventMatch); + } + if (!eventsArePairable(triplet)) { + continue; + } + const targetEvent = endEvent || beginEvent; + const event = { + rawSourceEvent: beginEvent, + cat: targetEvent.cat, + ph: targetEvent.ph, + pid: targetEvent.pid, + tid: targetEvent.tid, + id, + // Both events have the same name, so it doesn't matter which we pick to + // use as the description + name: beginEvent.name, + dur: Types.Timing.MicroSeconds(targetEvent.ts - beginEvent.ts), + ts: beginEvent.ts, + args: { + data: triplet, + }, + }; + if (event.dur < 0) { + // We have seen in the backend that sometimes animation events get + // generated with multiple begin entries, or multiple end entries, and this + // can cause invalid data on the performance panel, so we drop them. + // crbug.com/1472375 + continue; + } + syntheticEventCallback?.(event); + syntheticEvents.push(event); + } + return syntheticEvents.sort((a, b) => a.ts - b.ts); +} +export function createMatchedSortedSyntheticEvents(unpairedAsyncEvents, syntheticEventCallback) { + const matchedPairs = matchEvents(unpairedAsyncEvents); + const syntheticEvents = createSortedSyntheticEvents(matchedPairs, syntheticEventCallback); + return syntheticEvents; +} +/** + * Different trace events return line/column numbers that are 1 or 0 indexed. + * This function knows which events return 1 indexed numbers and normalizes + * them. The UI expects 0 indexed line numbers, so that is what we return. + */ +export function getZeroIndexedLineAndColumnForEvent(event) { + // Some events emit line numbers that are 1 indexed, but the UI layer expects + // numbers to be 0 indexed. So here, if the event matches a known 1-indexed + // number event, we subtract one from the line and column numbers. + // Otherwise, if the event has args.data.lineNumber/colNumber, we return it + // as is. 
+ const numbers = getRawLineAndColumnNumbersForEvent(event); + const { lineNumber, columnNumber } = numbers; + switch (event.name) { + // All these events have line/column numbers which are 1 indexed; so we + // subtract to make them 0 indexed. + case "FunctionCall" /* Types.TraceEvents.KnownEventName.FunctionCall */: + case "EvaluateScript" /* Types.TraceEvents.KnownEventName.EvaluateScript */: + case "v8.compile" /* Types.TraceEvents.KnownEventName.Compile */: + case "v8.produceCache" /* Types.TraceEvents.KnownEventName.CacheScript */: { + return { + lineNumber: typeof lineNumber === 'number' ? lineNumber - 1 : undefined, + columnNumber: typeof columnNumber === 'number' ? columnNumber - 1 : undefined, + }; + } + default: { + return numbers; + } + } +} +/** + * Different trace events contain stack traces with line/column numbers + * that are 1 or 0 indexed. + * This function knows which events return 1 indexed numbers and normalizes + * them. The UI expects 0 indexed line numbers, so that is what we return. + */ +export function getZeroIndexedStackTraceForEvent(event) { + const stack = stackTraceForEvent(event); + if (!stack) { + return null; + } + return stack.map(callFrame => { + const normalizedCallFrame = { ...callFrame }; + switch (event.name) { + case "ScheduleStyleRecalculation" /* Types.TraceEvents.KnownEventName.ScheduleStyleRecalculation */: + case "InvalidateLayout" /* Types.TraceEvents.KnownEventName.InvalidateLayout */: + case "UpdateLayoutTree" /* Types.TraceEvents.KnownEventName.UpdateLayoutTree */: { + normalizedCallFrame.lineNumber = callFrame.lineNumber && callFrame.lineNumber - 1; + normalizedCallFrame.columnNumber = callFrame.columnNumber && callFrame.columnNumber - 1; + } + } + return normalizedCallFrame; + }); +} +/** + * NOTE: you probably do not want this function! (Which is why it is not exported). + * + * Some trace events have 0 indexed line/column numbers, and others have 1 + * indexed. 
This function does NOT normalize them, but + * `getZeroIndexedLineAndColumnNumbersForEvent` does. It is best to use that! + * + * @see {@link getZeroIndexedLineAndColumnForEvent} + **/ +function getRawLineAndColumnNumbersForEvent(event) { + if (!event.args?.data) { + return { + lineNumber: undefined, + columnNumber: undefined, + }; + } + let lineNumber = undefined; + let columnNumber = undefined; + if ('lineNumber' in event.args.data && typeof event.args.data.lineNumber === 'number') { + lineNumber = event.args.data.lineNumber; + } + if ('columnNumber' in event.args.data && typeof event.args.data.columnNumber === 'number') { + columnNumber = event.args.data.columnNumber; + } + return { lineNumber, columnNumber }; +} +export function frameIDForEvent(event) { + // There are a few events (for example UpdateLayoutTree, ParseHTML) that have + // the frame stored in args.beginData + // Rather than list them all we just check for the presence of the field, so + // we are robust against future trace events also doing this. + // This check seems very robust, but it also helps satisfy TypeScript and + // prevents us against unexpected data. + if (event.args && 'beginData' in event.args && typeof event.args.beginData === 'object' && + event.args.beginData !== null && 'frame' in event.args.beginData && + typeof event.args.beginData.frame === 'string') { + return event.args.beginData.frame; + } + // Otherwise, we expect frame to be in args.data + if (event.args?.data?.frame) { + return event.args.data.frame; + } + // No known frame for this event. + return null; +} +const DevToolsTimelineEventCategory = 'disabled-by-default-devtools.timeline'; +export function isTopLevelEvent(event) { + if (event.name === 'JSRoot' && event.cat === 'toplevel') { + // This is used in TimelineJSProfile to insert a fake event prior to the + // CPU Profile in order to ensure the trace isn't truncated. So if we see + // this, we want to treat it as a top level event. 
+ // TODO(crbug.com/341234884): do we need this? + return true; + } + return event.cat.includes(DevToolsTimelineEventCategory) && event.name === "RunTask" /* Types.TraceEvents.KnownEventName.RunTask */; +} +function topLevelEventIndexEndingAfter(events, time) { + let index = Platform.ArrayUtilities.upperBound(events, time, (time, event) => time - event.ts) - 1; + while (index > 0 && !isTopLevelEvent(events[index])) { + index--; + } + return Math.max(index, 0); +} +export function findUpdateLayoutTreeEvents(events, startTime, endTime) { + const foundEvents = []; + const startEventIndex = topLevelEventIndexEndingAfter(events, startTime); + for (let i = startEventIndex; i < events.length; i++) { + const event = events[i]; + if (!Types.TraceEvents.isTraceEventUpdateLayoutTree(event)) { + continue; + } + if (event.ts >= (endTime || Infinity)) { + continue; + } + foundEvents.push(event); + } + return foundEvents; +} +/** + * Iterates events in a tree hierarchically, from top to bottom, + * calling back on every event's start and end in the order + * dictated by the corresponding timestamp. + * + * Events are assumed to be in ascendent order by timestamp. + * + * Events with 0 duration are treated as instant events. These do not have a + * begin and end, but will be passed to the config.onInstantEvent callback as + * they are discovered. Do not provide this callback if you are not interested + * in them. + * + * For example, given this tree, the following callbacks + * are expected to be made in the following order + * |---------------A---------------| + * |------B------||-------D------| + * |---C---| + * + * 1. Start A + * 3. Start B + * 4. Start C + * 5. End C + * 6. End B + * 7. Start D + * 8. End D + * 9. End A + * + * By default, async events are skipped. This behaviour can be + * overriden making use of the config.ignoreAsyncEvents parameter. 
+ */ +export function forEachEvent(events, config) { + const globalStartTime = config.startTime || Types.Timing.MicroSeconds(0); + const globalEndTime = config.endTime || Types.Timing.MicroSeconds(Infinity); + const ignoreAsyncEvents = config.ignoreAsyncEvents === false ? false : true; + const stack = []; + const startEventIndex = topLevelEventIndexEndingAfter(events, globalStartTime); + for (let i = startEventIndex; i < events.length; i++) { + const currentEvent = events[i]; + const currentEventTimings = eventTimingsMicroSeconds(currentEvent); + if (currentEventTimings.endTime < globalStartTime) { + continue; + } + if (currentEventTimings.startTime > globalEndTime) { + break; + } + const isIgnoredAsyncEvent = ignoreAsyncEvents && Types.TraceEvents.isAsyncPhase(currentEvent.ph); + if (isIgnoredAsyncEvent || Types.TraceEvents.isFlowPhase(currentEvent.ph)) { + continue; + } + // If we have now reached an event that is after a bunch of events, we need + // to call the onEndEvent callback for those events before moving on. + let lastEventOnStack = stack.at(-1); + let lastEventEndTime = lastEventOnStack ? eventTimingsMicroSeconds(lastEventOnStack).endTime : null; + while (lastEventOnStack && lastEventEndTime && lastEventEndTime <= currentEventTimings.startTime) { + stack.pop(); + config.onEndEvent(lastEventOnStack); + lastEventOnStack = stack.at(-1); + lastEventEndTime = lastEventOnStack ? eventTimingsMicroSeconds(lastEventOnStack).endTime : null; + } + // Now we have dealt with all events prior to this one, see if we need to care about this one. + if (config.eventFilter && !config.eventFilter(currentEvent)) { + // The user has chosen to filter this event out, so continue on and do nothing + continue; + } + if (currentEventTimings.duration) { + config.onStartEvent(currentEvent); + stack.push(currentEvent); + } + else if (config.onInstantEvent) { + // An event with 0 duration is an instant event. 
+ config.onInstantEvent(currentEvent); + } + } + // Now we have finished looping over all events; any events remaining on the + // stack need to have their onEndEvent called. + while (stack.length) { + const last = stack.pop(); + if (last) { + config.onEndEvent(last); + } + } +} +// Parsed categories are cached to prevent calling cat.split() +// multiple times on the same categories string. +const parsedCategories = new Map(); +export function eventHasCategory(event, category) { + let parsedCategoriesForEvent = parsedCategories.get(event.cat); + if (!parsedCategoriesForEvent) { + parsedCategoriesForEvent = new Set(event.cat.split(',') || []); + } + return parsedCategoriesForEvent.has(category); +} +//# sourceMappingURL=Trace.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/TreeHelpers.js b/node_modules/@paulirish/trace_engine/models/trace/helpers/TreeHelpers.js new file mode 100644 index 000000000..d694edfbb --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/TreeHelpers.js @@ -0,0 +1,272 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Types from '../types/types.js'; +let nodeIdCount = 0; +export const makeTraceEntryNodeId = () => (++nodeIdCount); +export const makeEmptyTraceEntryTree = () => ({ + roots: new Set(), + maxDepth: 0, +}); +export const makeEmptyTraceEntryNode = (entry, id) => ({ + entry, + id, + parent: null, + children: [], + depth: 0, +}); +class TraceEntryNodeIdTag { + /* eslint-disable-next-line no-unused-private-class-members */ + #tag; +} +/** + * Builds a hierarchy of the entries (trace events and profile calls) in + * a particular thread of a particular process, assuming that they're + * sorted, by iterating through all of the events in order. + * + * The approach is analogous to how a parser would be implemented. 
A + * stack maintains local context. A scanner peeks and pops from the data + * stream. Various "tokens" (events) are treated as "whitespace" + * (ignored). + * + * The tree starts out empty and is populated as the hierarchy is built. + * The nodes are also assumed to be created empty, with no known parent + * or children. + * + * Complexity: O(n), where n = number of events + */ +export function treify(entries, options) { + // As we construct the tree, store a map of each entry to its node. This + // means if you are iterating over a list of RendererEntry events you can + // easily look up that node in the tree. + const entryToNode = new Map(); + const stack = []; + // Reset the node id counter for every new renderer. + nodeIdCount = -1; + const tree = makeEmptyTraceEntryTree(); + for (let i = 0; i < entries.length; i++) { + const event = entries[i]; + // If the current event should not be part of the tree, then simply proceed + // with the next event. + if (options && !options.filter.has(event.name)) { + continue; + } + const duration = event.dur || 0; + const nodeId = makeTraceEntryNodeId(); + const node = makeEmptyTraceEntryNode(event, nodeId); + // If the parent stack is empty, then the current event is a root. Create a + // node for it, mark it as a root, then proceed with the next event. 
+ if (stack.length === 0) { + tree.roots.add(node); + event.selfTime = Types.Timing.MicroSeconds(duration); + stack.push(node); + tree.maxDepth = Math.max(tree.maxDepth, stack.length); + entryToNode.set(event, node); + continue; + } + const parentNode = stack.at(-1); + if (parentNode === undefined) { + throw new Error('Impossible: no parent node found in the stack'); + } + const parentEvent = parentNode.entry; + const begin = event.ts; + const parentBegin = parentEvent.ts; + const parentDuration = parentEvent.dur || 0; + const end = begin + duration; + const parentEnd = parentBegin + parentDuration; + // Check the relationship between the parent event at the top of the stack, + // and the current event being processed. There are only 4 distinct + // possiblities, only 2 of them actually valid, given the assumed sorting: + // 1. Current event starts before the parent event, ends whenever. (invalid) + // 2. Current event starts after the parent event, ends whenever. (valid) + // 3. Current event starts during the parent event, ends after. (invalid) + // 4. Current event starts and ends during the parent event. (valid) + // 1. If the current event starts before the parent event, then the data is + // not sorted properly, messed up some way, or this logic is incomplete. + const startsBeforeParent = begin < parentBegin; + if (startsBeforeParent) { + throw new Error('Impossible: current event starts before the parent event'); + } + // 2. If the current event starts after the parent event, then it's a new + // parent. Pop, then handle current event again. + const startsAfterParent = begin >= parentEnd; + if (startsAfterParent) { + stack.pop(); + i--; + // The last created node has been discarded, so discard this id. + nodeIdCount--; + continue; + } + // 3. If the current event starts during the parent event, but ends + // after it, then the data is messed up some way, for example a + // profile call was sampled too late after its start, ignore the + // problematic event. 
+ const endsAfterParent = end > parentEnd; + if (endsAfterParent) { + continue; + } + // 4. The only remaining case is the common case, where the current event is + // contained within the parent event. Create a node for the current + // event, establish the parent/child relationship, then proceed with the + // next event. + node.depth = stack.length; + node.parent = parentNode; + parentNode.children.push(node); + event.selfTime = Types.Timing.MicroSeconds(duration); + if (parentEvent.selfTime !== undefined) { + parentEvent.selfTime = Types.Timing.MicroSeconds(parentEvent.selfTime - (event.dur || 0)); + } + stack.push(node); + tree.maxDepth = Math.max(tree.maxDepth, stack.length); + entryToNode.set(event, node); + } + return { tree, entryToNode }; +} +/** + * Iterates events in a tree hierarchically, from top to bottom, + * calling back on every event's start and end in the order + * as it traverses down and then up the tree. + * + * For example, given this tree, the following callbacks + * are expected to be made in the following order + * |---------------A---------------| + * |------B------||-------D------| + * |---C---| + * + * 1. Start A + * 3. Start B + * 4. Start C + * 5. End C + * 6. End B + * 7. Start D + * 8. End D + * 9. End A + * + */ +export function walkTreeFromEntry(entryToNode, rootEntry, onEntryStart, onEntryEnd) { + const startNode = entryToNode.get(rootEntry); + if (!startNode) { + return; + } + walkTreeByNode(entryToNode, startNode, onEntryStart, onEntryEnd); +} +/** + * Given a Helpers.TreeHelpers.RendererTree, this will iterates events in hierarchically, visiting + * each root node and working from top to bottom, calling back on every event's + * start and end in the order as it traverses down and then up the tree. + * + * For example, given this tree, the following callbacks + * are expected to be made in the following order + * |------------- Task A -------------||-- Task E --| + * |-- Task B --||-- Task D --| + * |- Task C -| + * + * 1. 
Start A + * 3. Start B + * 4. Start C + * 5. End C + * 6. End B + * 7. Start D + * 8. End D + * 9. End A + * 10. Start E + * 11. End E + * + */ +export function walkEntireTree(entryToNode, tree, onEntryStart, onEntryEnd, traceWindowToInclude, minDuration) { + for (const rootNode of tree.roots) { + walkTreeByNode(entryToNode, rootNode, onEntryStart, onEntryEnd, traceWindowToInclude, minDuration); + } +} +function walkTreeByNode(entryToNode, rootNode, onEntryStart, onEntryEnd, traceWindowToInclude, minDuration) { + if (traceWindowToInclude && !treeNodeIsInWindow(rootNode, traceWindowToInclude)) { + // If this node is not within the provided window, we can skip it. We also + // can skip all its children too, as we know they won't be in the window if + // their parent is not. + return; + } + if (typeof minDuration !== 'undefined') { + const duration = Types.Timing.MicroSeconds(rootNode.entry.ts + Types.Timing.MicroSeconds(rootNode.entry.dur || 0)); + if (duration < minDuration) { + return; + } + } + onEntryStart(rootNode.entry); + for (const child of rootNode.children) { + walkTreeByNode(entryToNode, child, onEntryStart, onEntryEnd, traceWindowToInclude, minDuration); + } + onEntryEnd(rootNode.entry); +} +/** + * Returns true if the provided node is partially or fully within the trace + * window. The entire node does not have to fit inside the window, but it does + * have to partially intersect it. + */ +function treeNodeIsInWindow(node, traceWindow) { + const startTime = node.entry.ts; + const endTime = node.entry.ts + (node.entry.dur || 0); + // Min ======= startTime ========= Max => node is within window + if (startTime >= traceWindow.min && startTime < traceWindow.max) { + return true; + } + // Min ======= endTime ========= Max => node is within window + if (endTime > traceWindow.min && endTime <= traceWindow.max) { + return true; + } + // startTime ==== Min ======== Max === endTime => node spans greater than the window so is in it. 
+ if (startTime <= traceWindow.min && endTime >= traceWindow.max) { + return true; + } + return false; +} +/** + * Determines if the given events, which are assumed to be ordered can + * be organized into tree structures. + * This condition is met if there is *not* a pair of async events + * e1 and e2 where: + * + * e1.startTime < e2.startTime && e1.endTime > e2.startTime && e1.endTime < e2.endTime. + * or, graphically: + * |------- e1 ------| + * |------- e2 --------| + * + * Because a parent-child relationship cannot be made from the example + * above, a tree cannot be made from the set of events. + * + * Sync events from the same thread are tree-able by definition. + * + * Note that this will also return true if multiple trees can be + * built, for example if none of the events overlap with each other. + */ +export function canBuildTreesFromEvents(events) { + const stack = []; + for (const event of events) { + const startTime = event.ts; + const endTime = event.ts + (event.dur || 0); + let parent = stack.at(-1); + if (parent === undefined) { + stack.push(event); + continue; + } + let parentEndTime = parent.ts + (parent.dur || 0); + // Discard events that are not parents for this event. The parent + // is one whose end time is after this event start time. + while (stack.length && startTime >= parentEndTime) { + stack.pop(); + parent = stack.at(-1); + if (parent === undefined) { + break; + } + parentEndTime = parent.ts + (parent.dur || 0); + } + if (stack.length && endTime > parentEndTime) { + // If such an event exists but its end time is before this + // event's end time, then a tree cannot be made using this + // events. 
+ return false; + } + stack.push(event); + } + return true; +} +//# sourceMappingURL=TreeHelpers.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/helpers/bundle-tsconfig.json new file mode 100644 index 000000000..d8510a060 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../../front_end/models/trace/helpers"},"files":["../../../../../../../front_end/models/trace/helpers/helpers.ts"],"references":[{"path":"./helpers-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/helpers/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..17cd3270e --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/helpers", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + 
"../../../../../../../front_end/models/trace/helpers/helpers.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./helpers-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/helpers-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/helpers/helpers-tsconfig.json new file mode 100644 index 000000000..289c83200 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/helpers-tsconfig.json @@ -0,0 +1,59 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/helpers", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "helpers-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/helpers/Extensions.ts", + "../../../../../../../front_end/models/trace/helpers/SamplesIntegrator.ts", + "../../../../../../../front_end/models/trace/helpers/Timing.ts", + "../../../../../../../front_end/models/trace/helpers/Trace.ts", + "../../../../../../../front_end/models/trace/helpers/TreeHelpers.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + 
"../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../../core/common/bundle-tsconfig.json" + }, + { + "path": "../../../core/platform/bundle-tsconfig.json" + }, + { + "path": "../../../core/root/bundle-tsconfig.json" + }, + { + "path": "../../cpu_profile/bundle-tsconfig.json" + }, + { + "path": "../types/bundle-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/helpers/helpers.js b/node_modules/@paulirish/trace_engine/models/trace/helpers/helpers.js new file mode 100644 index 000000000..db55bbb46 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/helpers/helpers.js @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export * as Extensions from './Extensions.js'; +export * as SamplesIntegrator from './SamplesIntegrator.js'; +export * as Timing from './Timing.js'; +export * as Trace from './Trace.js'; +export * as TreeHelpers from './TreeHelpers.js'; +//# sourceMappingURL=helpers.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/CumulativeLayoutShift.js b/node_modules/@paulirish/trace_engine/models/trace/insights/CumulativeLayoutShift.js new file mode 100644 index 000000000..9528b512c --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/CumulativeLayoutShift.js @@ -0,0 +1,81 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Helpers from '../helpers/helpers.js'; +export function deps() { + return ['Meta', 'Animations']; +} +/** + * Each failure reason is represented by a bit flag. The bit shift operator '<<' is used to define + * which bit corresponds to each failure reason. + * https://source.chromium.org/search?q=f:compositor_animations.h%20%22enum%20FailureReason%22 + * @type {{flag: number, failure: AnimationFailureReasons}[]} + */ +const ACTIONABLE_FAILURE_REASONS = [ + { + flag: 1 << 13, + failure: "UNSUPPORTED_CSS_PROPERTY" /* AnimationFailureReasons.UNSUPPORTED_CSS_PROPERTY */, + }, + { + flag: 1 << 11, + failure: "TRANSFROM_BOX_SIZE_DEPENDENT" /* AnimationFailureReasons.TRANSFROM_BOX_SIZE_DEPENDENT */, + }, + { + flag: 1 << 12, + failure: "FILTER_MAY_MOVE_PIXELS" /* AnimationFailureReasons.FILTER_MAY_MOVE_PIXELS */, + }, + { + flag: 1 << 4, + failure: "NON_REPLACE_COMPOSITE_MODE" /* AnimationFailureReasons.NON_REPLACE_COMPOSITE_MODE */, + }, + { + flag: 1 << 6, + failure: "INCOMPATIBLE_ANIMATIONS" /* AnimationFailureReasons.INCOMPATIBLE_ANIMATIONS */, + }, + { + flag: 1 << 3, + failure: "UNSUPPORTED_TIMING_PARAMS" /* AnimationFailureReasons.UNSUPPORTED_TIMING_PARAMS */, + }, +]; +/** + * Returns a list of NoncompositedAnimationFailures. + */ +function getNonCompositedAnimations(animations) { + const failures = []; + for (const event of animations) { + const beginEvent = event.args.data.beginEvent; + const instantEvents = event.args.data.instantEvents || []; + /** + * Animation events containing composite information are ASYNC_NESTABLE_INSTANT ('n'). + * An animation may also contain multiple 'n' events, so we look through those with useful non-composited data. 
+ */ + for (const event of instantEvents) { + const failureMask = event.args.data.compositeFailed; + const unsupportedProperties = event.args.data.unsupportedProperties; + if (!failureMask || !unsupportedProperties) { + continue; + } + const failureReasons = ACTIONABLE_FAILURE_REASONS.filter(reason => failureMask & reason.flag).map(reason => { + return reason.failure; + }); + const failure = { + name: beginEvent.args.data.displayName, + failureReasons, + unsupportedProperties, + }; + failures.push(failure); + } + } + return failures; +} +export function generateInsight(traceParsedData, context) { + const compositeAnimationEvents = traceParsedData.Animations.animations.filter(event => { + const nav = Helpers.Trace.getNavigationForTraceEvent(event, context.frameId, traceParsedData.Meta.navigationsByFrameId); + return nav?.args.data?.navigationId === context.navigationId; + }); + const animationFailures = getNonCompositedAnimations(compositeAnimationEvents); + return { + animationFailures, + }; +} +//# sourceMappingURL=CumulativeLayoutShift.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/InsightRunners.js b/node_modules/@paulirish/trace_engine/models/trace/insights/InsightRunners.js new file mode 100644 index 000000000..073ee7705 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/InsightRunners.js @@ -0,0 +1,9 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export * as CumulativeLayoutShift from './CumulativeLayoutShift.js'; +export * as InteractionToNextPaint from './InteractionToNextPaint.js'; +export * as LargestContentfulPaint from './LargestContentfulPaint.js'; +export * as RenderBlocking from './RenderBlocking.js'; +export * as Viewport from './Viewport.js'; +//# sourceMappingURL=InsightRunners.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/InteractionToNextPaint.js b/node_modules/@paulirish/trace_engine/models/trace/insights/InteractionToNextPaint.js new file mode 100644 index 000000000..55396c32b --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/InteractionToNextPaint.js @@ -0,0 +1,35 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export function deps() { + return ['UserInteractions']; +} +export function generateInsight(traceParsedData, context) { + const interactionEvents = traceParsedData.UserInteractions.interactionEvents.filter(event => { + return event.args.data.navigationId === context.navigationId; + }); + if (!interactionEvents.length) { + // A valid result, when there is no user interaction. + return {}; + } + const longestByInteractionId = new Map(); + for (const event of interactionEvents) { + const key = event.interactionId; + const longest = longestByInteractionId.get(key); + if (!longest || event.dur > longest.dur) { + longestByInteractionId.set(key, event); + } + } + const normalizedInteractionEvents = [...longestByInteractionId.values()]; + normalizedInteractionEvents.sort((a, b) => b.dur - a.dur); + // INP is the "nearest-rank"/inverted_cdf 98th percentile, except Chrome only + // keeps the 10 worst events around, so it can never be more than the 10th from + // last array element. To keep things simpler, sort desc and pick from front. 
+ // See https://source.chromium.org/chromium/chromium/src/+/main:components/page_load_metrics/browser/responsiveness_metrics_normalization.cc;l=45-59;drc=cb0f9c8b559d9c7c3cb4ca94fc1118cc015d38ad + const highPercentileIndex = Math.min(9, Math.floor(normalizedInteractionEvents.length / 50)); + return { + longestInteractionEvent: normalizedInteractionEvents[0], + highPercentileInteractionEvent: normalizedInteractionEvents[highPercentileIndex], + }; +} +//# sourceMappingURL=InteractionToNextPaint.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/LargestContentfulPaint.js b/node_modules/@paulirish/trace_engine/models/trace/insights/LargestContentfulPaint.js new file mode 100644 index 000000000..26407cb13 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/LargestContentfulPaint.js @@ -0,0 +1,104 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Handlers from '../handlers/handlers.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +import { InsightWarning } from './types.js'; +export function deps() { + return ['NetworkRequests', 'PageLoadMetrics', 'LargestImagePaint', 'Meta']; +} +function breakdownPhases(nav, mainRequest, lcpMs, lcpRequest) { + const mainReqTiming = mainRequest.args.data.timing; + if (!mainReqTiming) { + throw new Error('no timing for main resource'); + } + const firstDocByteTs = Helpers.Timing.secondsToMicroseconds(mainReqTiming.requestTime) + + Helpers.Timing.millisecondsToMicroseconds(mainReqTiming.receiveHeadersStart); + const firstDocByteTiming = Types.Timing.MicroSeconds(firstDocByteTs - nav.ts); + const ttfb = Helpers.Timing.microSecondsToMilliseconds(firstDocByteTiming); + let renderDelay = Types.Timing.MilliSeconds(lcpMs - ttfb); + if (!lcpRequest) { + return { ttfb, renderDelay }; + } + const lcpStartTs = Types.Timing.MicroSeconds(lcpRequest.ts - nav.ts); + const resourceStart = Helpers.Timing.microSecondsToMilliseconds(lcpStartTs); + const lcpReqEndTs = Types.Timing.MicroSeconds(lcpRequest.args.data.syntheticData.finishTime - nav.ts); + const resourceEnd = Helpers.Timing.microSecondsToMilliseconds(lcpReqEndTs); + const loadDelay = Types.Timing.MilliSeconds(resourceStart - ttfb); + const loadTime = Types.Timing.MilliSeconds(resourceEnd - resourceStart); + renderDelay = Types.Timing.MilliSeconds(lcpMs - resourceEnd); + return { + ttfb, + loadDelay, + loadTime, + renderDelay, + }; +} +function findLCPRequest(traceParsedData, context, lcpEvent) { + const lcpNodeId = lcpEvent.args.data?.nodeId; + if (!lcpNodeId) { + throw new Error('no lcp node id'); + } + const imagePaint = traceParsedData.LargestImagePaint.get(lcpNodeId); + if (!imagePaint) { + return null; + } + const lcpUrl = imagePaint.args.data?.imageUrl; + if (!lcpUrl) { + throw new Error('no lcp url'); + } + // Look for the LCP resource. 
+ const lcpResource = traceParsedData.NetworkRequests.byTime.find(req => { + const nav = Helpers.Trace.getNavigationForTraceEvent(req, context.frameId, traceParsedData.Meta.navigationsByFrameId); + return (nav?.args.data?.navigationId === context.navigationId) && (req.args.data.url === lcpUrl); + }); + if (!lcpResource) { + throw new Error('no lcp resource found'); + } + return lcpResource; +} +export function generateInsight(traceParsedData, context) { + const networkRequests = traceParsedData.NetworkRequests; + const nav = traceParsedData.Meta.navigationsByNavigationId.get(context.navigationId); + if (!nav) { + throw new Error('no trace navigation'); + } + const frameMetrics = traceParsedData.PageLoadMetrics.metricScoresByFrameId.get(context.frameId); + if (!frameMetrics) { + throw new Error('no frame metrics'); + } + const navMetrics = frameMetrics.get(context.navigationId); + if (!navMetrics) { + throw new Error('no navigation metrics'); + } + const metricScore = navMetrics.get("LCP" /* Handlers.ModelHandlers.PageLoadMetrics.MetricName.LCP */); + const lcpEvent = metricScore?.event; + if (!lcpEvent || !Types.TraceEvents.isTraceEventLargestContentfulPaintCandidate(lcpEvent)) { + return { warnings: [InsightWarning.NO_LCP] }; + } + const lcpTiming = metricScore.timing; + const lcpMs = Helpers.Timing.microSecondsToMilliseconds(lcpTiming); + const lcpResource = findLCPRequest(traceParsedData, context, lcpEvent); + const mainReq = networkRequests.byTime.find(req => req.args.data.requestId === context.navigationId); + if (!mainReq) { + return { lcpMs, warnings: [InsightWarning.NO_DOCUMENT_REQUEST] }; + } + if (!lcpResource) { + return { + lcpMs, + phases: breakdownPhases(nav, mainReq, lcpMs, lcpResource), + }; + } + const imageLoadingAttr = lcpEvent.args.data?.loadingAttr; + const imagePreloaded = lcpResource?.args.data.isLinkPreload || lcpResource?.args.data.initiator?.type === 'preload'; + const imageFetchPriorityHint = lcpResource?.args.data.fetchPriorityHint; + 
return { + lcpMs, + phases: breakdownPhases(nav, mainReq, lcpMs, lcpResource), + shouldRemoveLazyLoading: imageLoadingAttr === 'lazy', + shouldIncreasePriorityHint: imageFetchPriorityHint !== 'high', + shouldPreloadImage: !imagePreloaded, + }; +} +//# sourceMappingURL=LargestContentfulPaint.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/RenderBlocking.js b/node_modules/@paulirish/trace_engine/models/trace/insights/RenderBlocking.js new file mode 100644 index 000000000..5ab995b2d --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/RenderBlocking.js @@ -0,0 +1,56 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as Handlers from '../handlers/handlers.js'; +import * as Helpers from '../helpers/helpers.js'; +import { InsightWarning } from './types.js'; +export function deps() { + return ['NetworkRequests', 'PageLoadMetrics']; +} +export function generateInsight(traceParsedData, context) { + const firstPaintTs = traceParsedData.PageLoadMetrics.metricScoresByFrameId.get(context.frameId) + ?.get(context.navigationId) + ?.get("FP" /* Handlers.ModelHandlers.PageLoadMetrics.MetricName.FP */) + ?.event?.ts; + if (!firstPaintTs) { + return { + renderBlockingRequests: [], + warnings: [InsightWarning.NO_FP], + }; + } + const renderBlockingRequests = []; + for (const req of traceParsedData.NetworkRequests.byTime) { + if (req.args.data.frame !== context.frameId) { + continue; + } + if (req.args.data.renderBlocking !== 'blocking' && req.args.data.renderBlocking !== 'in_body_parser_blocking') { + continue; + } + if (req.args.data.syntheticData.finishTime > firstPaintTs) { + continue; + } + // If a resource is marked `in_body_parser_blocking` it should only be considered render blocking if it is a + // high enough priority. Some resources (e.g. 
scripts) are not marked as high priority if they are fetched + // after a non-preloaded image. (See "early" definition in https://web.dev/articles/fetch-priority) + // + // There are edge cases and exceptions (e.g. priority hints) but this gives us the best approximation + // of render blocking resources in the document body. + if (req.args.data.renderBlocking === 'in_body_parser_blocking') { + const priority = req.args.data.priority; + const isScript = req.args.data.resourceType === "Script" /* Protocol.Network.ResourceType.Script */; + const isBlockingScript = isScript && priority === "High" /* Protocol.Network.ResourcePriority.High */; + if (priority !== "VeryHigh" /* Protocol.Network.ResourcePriority.VeryHigh */ && !isBlockingScript) { + continue; + } + } + const navigation = Helpers.Trace.getNavigationForTraceEvent(req, context.frameId, traceParsedData.Meta.navigationsByFrameId); + if (navigation?.args.data?.navigationId !== context.navigationId) { + continue; + } + renderBlockingRequests.push(req); + } + return { + renderBlockingRequests, + }; +} +//# sourceMappingURL=RenderBlocking.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/Viewport.js b/node_modules/@paulirish/trace_engine/models/trace/insights/Viewport.js new file mode 100644 index 000000000..8d7dc5b60 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/Viewport.js @@ -0,0 +1,39 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Helpers from '../helpers/helpers.js'; +import { InsightWarning } from './types.js'; +export function deps() { + return ['Meta', 'UserInteractions']; +} +export function generateInsight(traceParsedData, context) { + const events = traceParsedData.UserInteractions.beginCommitCompositorFrameEvents.filter(event => { + if (event.args.frame !== context.frameId) { + return false; + } + const navigation = Helpers.Trace.getNavigationForTraceEvent(event, context.frameId, traceParsedData.Meta.navigationsByFrameId); + if (navigation?.args.data?.navigationId !== context.navigationId) { + return false; + } + return true; + }); + if (!events.length) { + // Trace doesn't have the data we need. + return { + mobileOptimized: null, + warnings: [InsightWarning.NO_LAYOUT], + }; + } + // Returns true only if all events are mobile optimized. + for (const event of events) { + if (!event.args.is_mobile_optimized) { + return { + mobileOptimized: false, + }; + } + } + return { + mobileOptimized: true, + }; +} +//# sourceMappingURL=Viewport.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/insights/bundle-tsconfig.json new file mode 100644 index 000000000..2f020ef50 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../../front_end/models/trace/insights"},"files":["../../../../../../../front_end/models/trace/insights/insights.ts"],"references":[{"path":"./insights-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/insights/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..572746522 --- 
/dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/insights", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/insights/insights.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./insights-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/insights-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/insights/insights-tsconfig.json new file mode 100644 index 000000000..29647276e --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/insights-tsconfig.json @@ -0,0 +1,49 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + 
"noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/insights", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "insights-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/insights/CumulativeLayoutShift.ts", + "../../../../../../../front_end/models/trace/insights/InsightRunners.ts", + "../../../../../../../front_end/models/trace/insights/InteractionToNextPaint.ts", + "../../../../../../../front_end/models/trace/insights/LargestContentfulPaint.ts", + "../../../../../../../front_end/models/trace/insights/RenderBlocking.ts", + "../../../../../../../front_end/models/trace/insights/Viewport.ts", + "../../../../../../../front_end/models/trace/insights/types.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../handlers/bundle-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/insights.js b/node_modules/@paulirish/trace_engine/models/trace/insights/insights.js new file mode 100644 index 000000000..756ecb420 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/insights.js @@ -0,0 +1,6 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export * as InsightRunners from './InsightRunners.js'; +export * as Types from './types.js'; +//# sourceMappingURL=insights.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/insights/types.js b/node_modules/@paulirish/trace_engine/models/trace/insights/types.js new file mode 100644 index 000000000..757a34f21 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/insights/types.js @@ -0,0 +1,12 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export var InsightWarning; +(function (InsightWarning) { + InsightWarning["NO_FP"] = "NO_FP"; + InsightWarning["NO_LCP"] = "NO_LCP"; + // No network request could be identified as the primary HTML document. + InsightWarning["NO_DOCUMENT_REQUEST"] = "NO_DOCUMENT_REQUEST"; + InsightWarning["NO_LAYOUT"] = "NO_LAYOUT"; +})(InsightWarning || (InsightWarning = {})); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/root-causes/LayoutShift.js b/node_modules/@paulirish/trace_engine/models/trace/root-causes/LayoutShift.js new file mode 100644 index 000000000..ef413a73a --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/root-causes/LayoutShift.js @@ -0,0 +1,537 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import * as Platform from '../../../core/platform/platform.js'; +import * as Helpers from '../helpers/helpers.js'; +import * as Types from '../types/types.js'; +const fontRequestsByPrePaint = new Map(); +const renderBlocksByPrePaint = new Map(); +function setDefaultValue(map, shift) { + Platform.MapUtilities.getWithDefault(map, shift, () => { + return { + unsizedMedia: [], + iframes: [], + fontChanges: [], + renderBlockingRequests: [], + scriptStackTrace: [], + }; + }); +} +// Important: we purposefully treat `potentially_blocking` as +// non-render-blocking here because: +// 1. An async script can run on the main thread at any point, including before +// the page is loaded +// 2. An async script will never block the parsing and rendering process of the +// browser. +// 3. Therefore, from a developer's point of view, there is nothing more they +// can do if they've put `async` on, and within the context of Insights, we +// shouldn't report an async script as render blocking. +// In the future we may want to consider suggesting the use of `defer` over +// `async`, as it doesn't have this concern, but for now we'll allow `async` +// and not report it as an issue. +const NON_RENDER_BLOCKING_VALUES = new Set([ + 'non_blocking', + 'potentially_blocking', +]); +function networkRequestIsRenderBlockingInFrame(event, frameId) { + const isRenderBlocking = !NON_RENDER_BLOCKING_VALUES.has(event.args.data.renderBlocking); + return isRenderBlocking && event.args.data.frame === frameId; +} +export class LayoutShiftRootCauses { + #protocolInterface; + #rootCauseCacheMap = new Map(); + #nodeDetailsCache = new Map(); + #iframeRootCausesEnabled; + constructor(protocolInterface, options) { + this.#protocolInterface = protocolInterface; + this.#iframeRootCausesEnabled = options?.enableIframeRootCauses ?? false; + } + /** + * Calculates the potential root causes for a given layout shift event. Once + * calculated, this data is cached. 
+ * Note: because you need all layout shift data at once to calculate these + * correctly, this function will parse the root causes for _all_ layout shift + * events the first time that it's called. That then populates the cache for + * each shift, so any subsequent calls are just a constant lookup. + */ + async rootCausesForEvent(modelData, event) { + const cachedResult = this.#rootCauseCacheMap.get(event); + if (cachedResult) { + return cachedResult; + } + const allLayoutShifts = modelData.LayoutShifts.clusters.flatMap(cluster => cluster.events); + // Make sure a value in the cache is set even for shifts that don't have a root cause, + // so that we don't have to recompute when no root causes are found. In case a cause + // for a shift is found, the default value is replaced. + allLayoutShifts.forEach(shift => setDefaultValue(this.#rootCauseCacheMap, shift)); + // Populate the cache + await this.blameShifts(allLayoutShifts, modelData); + const resultForEvent = this.#rootCauseCacheMap.get(event); + if (!resultForEvent) { + // No root causes available for this layout shift. + return null; + } + return resultForEvent; + } + /** + * Determines potential root causes for shifts + */ + async blameShifts(layoutShifts, modelData) { + await this.linkShiftsToLayoutInvalidations(layoutShifts, modelData); + this.linkShiftsToLayoutEvents(layoutShifts, modelData); + } + /** + * "LayoutInvalidations" are a set of trace events dispatched in Blink under the name + * "layoutInvalidationTracking", which track invalidations on the "Layout"stage of the + * rendering pipeline. This function utilizes this event to flag potential root causes + * to layout shifts. 
+ */ + async linkShiftsToLayoutInvalidations(layoutShifts, modelData) { + const { prePaintEvents, layoutInvalidationEvents, scheduleStyleInvalidationEvents, backendNodeIds } = modelData.LayoutShifts; + // For the purposes of determining root causes of layout shifts, we + // consider scheduleStyleInvalidationTracking and + // LayoutInvalidationTracking events as events that could have been the + // cause of the layout shift. + const eventsForLayoutInvalidation = [...layoutInvalidationEvents, ...scheduleStyleInvalidationEvents]; + const nodes = await this.#protocolInterface.pushNodesByBackendIdsToFrontend(backendNodeIds); + const nodeIdsByBackendIdMap = new Map(); + for (let i = 0; i < backendNodeIds.length; i++) { + nodeIdsByBackendIdMap.set(backendNodeIds[i], nodes[i]); + } + // Maps from PrePaint events to LayoutShifts that occured in each one. + const shiftsByPrePaint = getShiftsByPrePaintEvents(layoutShifts, prePaintEvents); + for (const layoutInvalidation of eventsForLayoutInvalidation) { + // Get the first PrePaint event that happened after the current LayoutInvalidation event. + const nextPrePaintIndex = Platform.ArrayUtilities.nearestIndexFromBeginning(prePaintEvents, prePaint => prePaint.ts > layoutInvalidation.ts); + if (nextPrePaintIndex === null) { + // No PrePaint event registered after this LayoutInvalidation. Continue. + continue; + } + const nextPrePaint = prePaintEvents[nextPrePaintIndex]; + const subsequentShifts = shiftsByPrePaint.get(nextPrePaint); + if (!subsequentShifts) { + // The PrePaint after the current LayoutInvalidation doesn't contain shifts. 
+ continue; + } + const fontChangeRootCause = this.getFontChangeRootCause(layoutInvalidation, nextPrePaint, modelData); + const renderBlockRootCause = this.getRenderBlockRootCause(layoutInvalidation, nextPrePaint, modelData); + const layoutInvalidationNodeId = nodeIdsByBackendIdMap.get(layoutInvalidation.args.data.nodeId); + let unsizedMediaRootCause = null; + let iframeRootCause = null; + if (layoutInvalidationNodeId !== undefined && + Types.TraceEvents.isTraceEventLayoutInvalidationTracking(layoutInvalidation)) { + unsizedMediaRootCause = await this.getUnsizedMediaRootCause(layoutInvalidation, layoutInvalidationNodeId); + iframeRootCause = await this.getIframeRootCause(layoutInvalidation, layoutInvalidationNodeId); + } + if (!unsizedMediaRootCause && !iframeRootCause && !fontChangeRootCause && !renderBlockRootCause) { + continue; + } + // Add found potential root causes to all the shifts in this PrePaint and populate the cache. + for (const shift of subsequentShifts) { + const rootCausesForShift = Platform.MapUtilities.getWithDefault(this.#rootCauseCacheMap, shift, () => { + return { + unsizedMedia: [], + iframes: [], + fontChanges: [], + renderBlockingRequests: [], + scriptStackTrace: [], + }; + }); + if (unsizedMediaRootCause && + !rootCausesForShift.unsizedMedia.some(media => media.node.nodeId === unsizedMediaRootCause?.node.nodeId) && + shift.args.frame === layoutInvalidation.args.data.frame) { + rootCausesForShift.unsizedMedia.push(unsizedMediaRootCause); + } + if (iframeRootCause && + !rootCausesForShift.iframes.some(injectedIframe => injectedIframe.iframe.nodeId === iframeRootCause?.iframe.nodeId)) { + rootCausesForShift.iframes.push(iframeRootCause); + } + if (fontChangeRootCause) { + // Unlike other root causes, we calculate fonts causing a shift only once, + // which means we assign the built array instead of appending new objects + // to it. 
+ rootCausesForShift.fontChanges = fontChangeRootCause; + } + if (renderBlockRootCause) { + rootCausesForShift.renderBlockingRequests = renderBlockRootCause; + } + } + } + } + /** + * For every shift looks up the initiator of its corresponding Layout event. This initiator + * is assigned by the RendererHandler and contains the stack trace of the point in a script + * that caused a style recalculation or a relayout. This stack trace is added to the shift's + * potential root causes. + * Note that a Layout cannot always be linked to a script, in that case, we cannot add a + * "script causing reflow" as a potential root cause to the corresponding shift. + */ + linkShiftsToLayoutEvents(layoutShifts, modelData) { + const { prePaintEvents } = modelData.LayoutShifts; + // Maps from PrePaint events to LayoutShifts that occured in each one. + const shiftsByPrePaint = getShiftsByPrePaintEvents(layoutShifts, prePaintEvents); + const eventTriggersLayout = ({ name }) => { + const knownName = name; + return knownName === "Layout" /* Types.TraceEvents.KnownEventName.Layout */; + }; + const layoutEvents = modelData.Renderer.allTraceEntries.filter(eventTriggersLayout); + for (const layout of layoutEvents) { + // Get the first PrePaint event that happened after the current layout event. + const nextPrePaintIndex = Platform.ArrayUtilities.nearestIndexFromBeginning(prePaintEvents, prePaint => prePaint.ts > layout.ts + (layout.dur || 0)); + if (nextPrePaintIndex === null) { + // No PrePaint event registered after this LayoutInvalidation. Continue. + continue; + } + const nextPrePaint = prePaintEvents[nextPrePaintIndex]; + const subsequentShifts = shiftsByPrePaint.get(nextPrePaint); + if (!subsequentShifts) { + // The PrePaint after the current LayoutInvalidation doesn't contain shifts. + continue; + } + const layoutNode = modelData.Renderer.entryToNode.get(layout); + const initiator = layoutNode ? 
modelData.Initiators.eventToInitiator.get(layoutNode.entry) : null; + const stackTrace = initiator?.args?.data?.stackTrace; + if (!stackTrace) { + continue; + } + // Add found potential root causes to all the shifts in this PrePaint and populate the cache. + for (const shift of subsequentShifts) { + const rootCausesForShift = Platform.MapUtilities.getWithDefault(this.#rootCauseCacheMap, shift, () => { + return { + unsizedMedia: [], + iframes: [], + fontChanges: [], + renderBlockingRequests: [], + scriptStackTrace: [], + }; + }); + if (rootCausesForShift.scriptStackTrace.length === 0) { + rootCausesForShift.scriptStackTrace = stackTrace; + } + } + } + } + /** + * Given a LayoutInvalidation trace event, determines if it was dispatched + * because a media element without dimensions was resized. + */ + async getUnsizedMediaRootCause(layoutInvalidation, layoutInvalidationNodeId) { + // Filter events to resizes only. + if (layoutInvalidation.args.data.reason !== "Size changed" /* Types.TraceEvents.LayoutInvalidationReason.SIZE_CHANGED */) { + return null; + } + const layoutInvalidationNode = await this.getNodeDetails(layoutInvalidationNodeId); + if (!layoutInvalidationNode) { + return null; + } + const computedStylesList = await this.#protocolInterface.getComputedStyleForNode(layoutInvalidationNode.nodeId); + const computedStyles = new Map(computedStylesList.map(item => [item.name, item.value])); + if (computedStyles && !(await nodeIsUnfixedMedia(layoutInvalidationNode, computedStyles))) { + return null; + } + const authoredDimensions = await this.getNodeAuthoredDimensions(layoutInvalidationNode); + if (dimensionsAreExplicit(authoredDimensions)) { + return null; + } + const computedDimensions = computedStyles ? 
getNodeComputedDimensions(computedStyles) : {}; + return { node: layoutInvalidationNode, authoredDimensions, computedDimensions }; + } + /** + * Given a LayoutInvalidation trace event, determines if it was dispatched + * because a node, which is an ancestor to an iframe, was injected. + */ + async getIframeRootCause(layoutInvalidation, layoutInvalidationNodeId) { + if (!this.#iframeRootCausesEnabled) { + return null; + } + if (!layoutInvalidation.args.data.nodeName?.startsWith('IFRAME') && + layoutInvalidation.args.data.reason !== "Style changed" /* Types.TraceEvents.LayoutInvalidationReason.STYLE_CHANGED */ && + layoutInvalidation.args.data.reason !== "Added to layout" /* Types.TraceEvents.LayoutInvalidationReason.ADDED_TO_LAYOUT */) { + return null; + } + const layoutInvalidationNode = await this.getNodeDetails(layoutInvalidationNodeId); + if (!layoutInvalidationNode) { + return null; + } + const iframe = firstIframeInDOMTree(layoutInvalidationNode); + if (!iframe) { + return null; + } + return { iframe }; + } + async getNodeDetails(nodeId) { + let nodeDetails = this.#nodeDetailsCache.get(nodeId); + if (nodeDetails !== undefined) { + return nodeDetails; + } + nodeDetails = await this.#protocolInterface.getNode(nodeId); + this.#nodeDetailsCache.set(nodeId, nodeDetails); + return nodeDetails; + } + /** + * Given a layout invalidation event and a sorted array, returns the subset of requests that arrived within a + * 500ms window before the layout invalidation. 
+ */ + requestsInInvalidationWindow(layoutInvalidation, modelData) { + const requestsSortedByEndTime = modelData.NetworkRequests.byTime.sort((req1, req2) => { + const req1EndTime = req1.ts + req1.dur; + const req2EndTime = req2.ts + req2.dur; + return req1EndTime - req2EndTime; + }); + const lastRequestIndex = Platform.ArrayUtilities.nearestIndexFromEnd(requestsSortedByEndTime, request => request.ts + request.dur < layoutInvalidation.ts); + if (lastRequestIndex === null) { + return []; + } + const MAX_DELTA_FOR_FONT_REQUEST = Helpers.Timing.secondsToMicroseconds(Types.Timing.Seconds(0.5)); + const requestsInInvalidationWindow = []; + // Get all requests finished within the valid window. + for (let i = lastRequestIndex; i > -1; i--) { + const previousRequest = requestsSortedByEndTime[i]; + const previousRequestEndTime = previousRequest.ts + previousRequest.dur; + if (layoutInvalidation.ts - previousRequestEndTime < MAX_DELTA_FOR_FONT_REQUEST) { + const requestInInvalidationWindow = { request: previousRequest }; + const initiator = this.#protocolInterface.getInitiatorForRequest(previousRequest.args.data.url); + requestInInvalidationWindow.initiator = initiator || undefined; + requestsInInvalidationWindow.push(requestInInvalidationWindow); + } + else { + // No more requests fit in the time window. + break; + } + } + return requestsInInvalidationWindow; + } + /** + * Given a LayoutInvalidation trace event, determines if it was dispatched + * because fonts were changed and if so returns the information of all network + * request with which the fonts were possibly fetched, if any. The computed + * network requests are cached for the corresponding prepaint event, meaning + * that other LayoutInvalidation events that correspond to the same prepaint + * are not processed and the cached network requests for the prepaint is + * returned instead. 
+ */ + getFontChangeRootCause(layoutInvalidation, nextPrePaint, modelData) { + if (layoutInvalidation.args.data.reason !== "Fonts changed" /* Types.TraceEvents.LayoutInvalidationReason.FONTS_CHANGED */) { + return null; + } + // Prevent computing the result of this function multiple times per PrePaint event. + const fontRequestsForPrepaint = fontRequestsByPrePaint.get(nextPrePaint); + if (fontRequestsForPrepaint !== undefined) { + return fontRequestsForPrepaint; + } + const fontRequestsInThisPrepaint = this.getFontRequestsInInvalidationWindow(this.requestsInInvalidationWindow(layoutInvalidation, modelData)); + fontRequestsByPrePaint.set(nextPrePaint, fontRequestsInThisPrepaint); + return fontRequestsInThisPrepaint; + } + /** + * Given the requests that arrived within a 500ms window before the layout invalidation, returns the font + * requests of them. + */ + getFontRequestsInInvalidationWindow(requestsInInvalidationWindow) { + const fontRequests = []; + // Get all requests finished within the valid window. + for (let i = 0; i < requestsInInvalidationWindow.length; i++) { + const fontRequest = requestsInInvalidationWindow[i]; + if (!fontRequest.request.args.data.mimeType.startsWith('font')) { + continue; + } + const fontFace = this.#protocolInterface.fontFaceForSource(fontRequest.request.args.data.url); + if (!fontFace || fontFace.fontDisplay === 'optional') { + // Setting font-display to optional is part of what the developer + // can do to avoid layout shifts due to FOIT/FOUT, as such we cannot + // suggest any actionable insight here. + continue; + } + fontRequest.fontFace = fontFace; + fontRequests.push(fontRequest); + } + return fontRequests; + } + /** + * Given a LayoutInvalidation trace event, determines if it arrived within a 500ms window before the layout + * invalidation and if so returns the information of all network request, if any. 
The computed network + * requests are cached for the corresponding prepaint event, meaning that other LayoutInvalidation events + * that correspond to the same prepaint are not processed and the cached network requests for the prepaint is + * returned instead. + */ + getRenderBlockRootCause(layoutInvalidation, nextPrePaint, modelData) { + // Prevent computing the result of this function multiple times per PrePaint event. + const renderBlocksInPrepaint = renderBlocksByPrePaint.get(nextPrePaint); + if (renderBlocksInPrepaint !== undefined) { + return renderBlocksInPrepaint; + } + const renderBlocksInThisPrepaint = getRenderBlockRequestsInInvalidationWindow(this.requestsInInvalidationWindow(layoutInvalidation, modelData)); + renderBlocksByPrePaint.set(nextPrePaint, renderBlocksInThisPrepaint); + return renderBlocksInThisPrepaint; + } + /** + * Returns a function that retrieves the active value of a given + * CSS property within the matched styles of the param node. + * The first occurence within the matched styles is returned and the + * value is looked up in the following order, which follows CSS + * specificity: + * 1. Inline styles. + * 2. CSS rules matching this node, from all applicable stylesheets. + * 3. Attribute defined styles. 
+ */ + async nodeMatchedStylesPropertyGetter(node) { + const response = await this.#protocolInterface.getMatchedStylesForNode(node.nodeId); + function cssPropertyValueGetter(cssProperty) { + let prop = response.inlineStyle?.cssProperties.find(prop => prop.name === cssProperty); + if (prop) { + return prop.value; + } + for (const { rule } of response.matchedCSSRules || []) { + const prop = rule.style.cssProperties.find(prop => prop.name === cssProperty); + if (prop) { + return prop.value; + } + } + prop = response.attributesStyle?.cssProperties.find(prop => prop.name === cssProperty); + if (prop) { + return prop.value; + } + return null; + } + return cssPropertyValueGetter; + } + /** + * Returns the CSS dimensions set to the node from its matched styles. + */ + async getNodeAuthoredDimensions(node) { + const authoredDimensions = {}; + const cssMatchedRulesGetter = await this.nodeMatchedStylesPropertyGetter(node); + if (!cssMatchedRulesGetter) { + return authoredDimensions; + } + const attributesFlat = node.attributes || []; + const attributes = []; + for (let i = 0; i < attributesFlat.length; i += 2) { + attributes.push({ name: attributesFlat[i], value: attributesFlat[i + 1] }); + } + const htmlHeight = attributes.find(attr => attr.name === 'height' && htmlAttributeIsExplicit(attr)); + const htmlWidth = attributes.find(attr => attr.name === 'width' && htmlAttributeIsExplicit(attr)); + const cssExplicitAspectRatio = cssMatchedRulesGetter('aspect-ratio') || undefined; + if (htmlHeight && htmlWidth && cssExplicitAspectRatio) { + return { height: htmlHeight.value, width: htmlWidth.value, aspectRatio: cssExplicitAspectRatio }; + } + const cssHeight = cssMatchedRulesGetter('height') || undefined; + const cssWidth = cssMatchedRulesGetter('width') || undefined; + return { height: cssHeight, width: cssWidth, aspectRatio: cssExplicitAspectRatio }; + } +} +/** + * Given the requests that arrived within a 500ms window before the layout invalidation, returns the render + * block 
requests of them. + */ +function getRenderBlockRequestsInInvalidationWindow(requestsInInvalidationWindow) { + const renderBlockingRequests = []; + // Get all requests finished within the valid window. + for (let i = 0; i < requestsInInvalidationWindow.length; i++) { + const mainFrameId = requestsInInvalidationWindow[i].request.args.data.frame; + if (!networkRequestIsRenderBlockingInFrame(requestsInInvalidationWindow[i].request, mainFrameId)) { + continue; + } + renderBlockingRequests.push(requestsInInvalidationWindow[i]); + } + return renderBlockingRequests; +} +function firstIframeInDOMTree(root) { + if (root.nodeName === 'IFRAME') { + return root; + } + const children = root.children; + if (!children) { + return null; + } + for (const child of children) { + const iFrameInChild = firstIframeInDOMTree(child); + if (iFrameInChild) { + return iFrameInChild; + } + } + return null; +} +function cssPropertyIsExplicitlySet(propertyValue) { + return !['auto', 'initial', 'unset', 'inherit'].includes(propertyValue); +} +function htmlAttributeIsExplicit(attr) { + return parseInt(attr.value, 10) >= 0; +} +function computedStyleHasBackroundImage(computedStyle) { + const CSS_URL_REGEX = /^url\("([^"]+)"\)$/; + const backgroundImage = computedStyle.get('background-image'); + if (!backgroundImage) { + return false; + } + return CSS_URL_REGEX.test(backgroundImage); +} +function computedStyleHasFixedPosition(computedStyle) { + const position = computedStyle.get('position'); + if (!position) { + return false; + } + return position === 'fixed' || position === 'absolute'; +} +function getNodeComputedDimensions(computedStyle) { + const computedDimensions = {}; + computedDimensions.height = computedStyle.get('height'); + computedDimensions.width = computedStyle.get('width'); + computedDimensions.aspectRatio = computedStyle.get('aspect-ratio'); + return computedDimensions; +} +/** + * Determines if a node is a media element and is not fixed positioned + * (i.e. 
"position: fixed;" or "position: absolute;") + */ +async function nodeIsUnfixedMedia(node, computedStyle) { + const localName = node.localName; + const isBackgroundImage = computedStyleHasBackroundImage(computedStyle); + if (localName !== 'img' && localName !== 'video' && !isBackgroundImage) { + // Not a media element. + return false; + } + const isFixed = computedStyleHasFixedPosition(computedStyle); + return !isFixed; +} +/** + * Determines if a CSS dimensions object explicitly defines both width and height + * (i.e. not set to auto, inherit, etc.) + */ +function dimensionsAreExplicit(dimensions) { + const { height, width, aspectRatio } = dimensions; + const explicitHeight = Boolean(height && cssPropertyIsExplicitlySet(height)); + const explicitWidth = Boolean(width && cssPropertyIsExplicitlySet(width)); + const explicitAspectRatio = Boolean(aspectRatio && cssPropertyIsExplicitlySet(aspectRatio)); + const explicitWithAR = (explicitHeight || explicitWidth) && explicitAspectRatio; + return (explicitHeight && explicitWidth) || explicitWithAR; +} +/** + * Given an array of layout shift and PrePaint events, returns a mapping from + * PrePaint events to layout shifts dispatched within it. + */ +function getShiftsByPrePaintEvents(layoutShifts, prePaintEvents) { + // Maps from PrePaint events to LayoutShifts that occured in each one. + const shiftsByPrePaint = new Map(); + // Associate all shifts to their corresponding PrePaint. + for (const prePaintEvent of prePaintEvents) { + const firstShiftIndex = Platform.ArrayUtilities.nearestIndexFromBeginning(layoutShifts, shift => shift.ts >= prePaintEvent.ts); + if (firstShiftIndex === null) { + // No layout shifts registered after this PrePaint start. Continue. 
+ continue; + } + for (let i = firstShiftIndex; i < layoutShifts.length; i++) { + const shift = layoutShifts[i]; + if (shift.ts >= prePaintEvent.ts && shift.ts <= prePaintEvent.ts + prePaintEvent.dur) { + const shiftsInPrePaint = Platform.MapUtilities.getWithDefault(shiftsByPrePaint, prePaintEvent, () => []); + shiftsInPrePaint.push(shift); + } + if (shift.ts > prePaintEvent.ts + prePaintEvent.dur) { + // Reached the end of this PrePaint. Continue to the next one. + break; + } + } + } + return shiftsByPrePaint; +} +//# sourceMappingURL=LayoutShift.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/root-causes/RootCauses.js b/node_modules/@paulirish/trace_engine/models/trace/root-causes/RootCauses.js new file mode 100644 index 000000000..caadd6fa1 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/root-causes/RootCauses.js @@ -0,0 +1,12 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+import { LayoutShiftRootCauses } from './LayoutShift.js'; +export class RootCauses { + layoutShifts; + constructor(protocolInterface) { + this.layoutShifts = new LayoutShiftRootCauses(protocolInterface); + } +} +export { LayoutShiftRootCauses } from './LayoutShift.js'; +//# sourceMappingURL=RootCauses.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/root-causes/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/root-causes/bundle-tsconfig.json new file mode 100644 index 000000000..907e6c0d9 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/root-causes/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../../front_end/models/trace/root-causes"},"files":["../../../../../../../front_end/models/trace/root-causes/root-causes.ts"],"references":[{"path":"./root-causes-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/root-causes/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/root-causes/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..01ebeac78 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/root-causes/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/root-causes", + "skipLibCheck": true, + "sourceMap": true, + 
"strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/root-causes/root-causes.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./root-causes-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/root-causes/root-causes-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/root-causes/root-causes-tsconfig.json new file mode 100644 index 000000000..f53ddf51f --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/root-causes/root-causes-tsconfig.json @@ -0,0 +1,56 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/root-causes", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "root-causes-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/root-causes/LayoutShift.ts", + "../../../../../../../front_end/models/trace/root-causes/RootCauses.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + 
"../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../../core/common/bundle-tsconfig.json" + }, + { + "path": "../../../core/platform/bundle-tsconfig.json" + }, + { + "path": "../../../core/root/bundle-tsconfig.json" + }, + { + "path": "../handlers/bundle-tsconfig.json" + }, + { + "path": "../types/bundle-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/root-causes/root-causes.js b/node_modules/@paulirish/trace_engine/models/trace/root-causes/root-causes.js new file mode 100644 index 000000000..1cfa27b91 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/root-causes/root-causes.js @@ -0,0 +1,5 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export * as RootCauses from './RootCauses.js'; +//# sourceMappingURL=root-causes.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/trace-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/trace-tsconfig.json new file mode 100644 index 000000000..58254c67a --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/trace-tsconfig.json @@ -0,0 +1,70 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../front_end/models/trace", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "trace-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../front_end/models/trace/EntriesFilter.ts", + "../../../../../../front_end/models/trace/ModelImpl.ts", + "../../../../../../front_end/models/trace/Processor.ts", + "../../../../../../front_end/models/trace/TracingManager.ts", + "../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../core/common/bundle-tsconfig.json" + }, + { + "path": "../../core/platform/bundle-tsconfig.json" + }, + { + "path": "../../core/sdk/bundle-tsconfig.json" + }, + { + "path": "extras/bundle-tsconfig.json" + }, + { + "path": "handlers/bundle-tsconfig.json" + }, + { + "path": 
"helpers/bundle-tsconfig.json" + }, + { + "path": "insights/bundle-tsconfig.json" + }, + { + "path": "root-causes/bundle-tsconfig.json" + }, + { + "path": "types/bundle-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/trace.js b/node_modules/@paulirish/trace_engine/models/trace/trace.js new file mode 100644 index 000000000..54e9ce589 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/trace.js @@ -0,0 +1,15 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +import * as EntriesFilter from './EntriesFilter.js'; +import * as Extras from './extras/extras.js'; +import * as Handlers from './handlers/handlers.js'; +import * as Helpers from './helpers/helpers.js'; +import * as Insights from './insights/insights.js'; +import * as TraceModel from './ModelImpl.js'; +import * as Processor from './Processor.js'; +import * as RootCauses from './root-causes/root-causes.js'; +import * as TracingManager from './TracingManager.js'; +import * as Types from './types/types.js'; +export { EntriesFilter, Extras, Handlers, Helpers, Insights, Processor, RootCauses, TraceModel, TracingManager, Types, }; +//# sourceMappingURL=trace.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/Configuration.js b/node_modules/@paulirish/trace_engine/models/trace/types/Configuration.js new file mode 100644 index 000000000..c94b0f7ee --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/Configuration.js @@ -0,0 +1,17 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export const defaults = () => ({ + includeRuntimeCallStats: false, + showAllEvents: false, + debugMode: false, +}); +/** + * Generates a key that can be used to represent this config in a cache. This is + * used mainly in tests, where we want to avoid re-parsing a file if we have + * already processed it with the same configuration. + */ +export function configToCacheKey(config) { + return JSON.stringify(config); +} +//# sourceMappingURL=Configuration.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/Extensions.js b/node_modules/@paulirish/trace_engine/models/trace/types/Extensions.js new file mode 100644 index 000000000..022206412 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/Extensions.js @@ -0,0 +1,38 @@ +// Copyright 2024 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +const extensionPalette = [ + 'primary', + 'primary-light', + 'primary-dark', + 'secondary', + 'secondary-light', + 'secondary-dark', + 'tertiary', + 'tertiary-light', + 'tertiary-dark', + 'error', +]; +export function colorIsValid(color) { + return extensionPalette.includes(color); +} +export function validateColorInPayload(payload) { + if (!('color' in payload) || !payload.color) { + return false; + } + const color = payload['color']; + return colorIsValid(color); +} +export function isExtensionPayloadMarker(payload) { + const colorIsValid = validateColorInPayload(payload); + return payload.metadata.dataType === "marker" /* ExtensionEntryType.MARKER */ && colorIsValid; +} +export function isExtensionPayloadFlameChartEntry(payload) { + const colorIsValid = validateColorInPayload(payload); + const hasTrack = 'track' in payload && Boolean(payload.track); + return payload.metadata.dataType === "track-entry" /* ExtensionEntryType.TRACK_ENTRY */ && hasTrack && colorIsValid; +} +export function 
isSyntheticExtensionEntry(entry) { + return entry.cat === 'devtools.extension'; +} +//# sourceMappingURL=Extensions.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/File.js b/node_modules/@paulirish/trace_engine/models/trace/types/File.js new file mode 100644 index 000000000..742709694 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/File.js @@ -0,0 +1,5 @@ +// Copyright 2023 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export {}; +//# sourceMappingURL=File.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/Timing.js b/node_modules/@paulirish/trace_engine/models/trace/types/Timing.js new file mode 100644 index 000000000..337b4bb6d --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/Timing.js @@ -0,0 +1,13 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export function MicroSeconds(value) { + return value; +} +export function MilliSeconds(value) { + return value; +} +export function Seconds(value) { + return value; +} +//# sourceMappingURL=Timing.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/TraceEvents.js b/node_modules/@paulirish/trace_engine/models/trace/types/TraceEvents.js new file mode 100644 index 000000000..5eef29f00 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/TraceEvents.js @@ -0,0 +1,458 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. 
+export function isNestableAsyncPhase(phase) { + return phase === "b" /* Phase.ASYNC_NESTABLE_START */ || phase === "e" /* Phase.ASYNC_NESTABLE_END */ || + phase === "n" /* Phase.ASYNC_NESTABLE_INSTANT */; +} +export function isAsyncPhase(phase) { + return isNestableAsyncPhase(phase) || phase === "S" /* Phase.ASYNC_BEGIN */ || phase === "T" /* Phase.ASYNC_STEP_INTO */ || + phase === "F" /* Phase.ASYNC_END */ || phase === "p" /* Phase.ASYNC_STEP_PAST */; +} +export function isFlowPhase(phase) { + return phase === "s" /* Phase.FLOW_START */ || phase === "t" /* Phase.FLOW_STEP */ || phase === "f" /* Phase.FLOW_END */; +} +export function objectIsTraceEventCallFrame(object) { + return ('functionName' in object && typeof object.functionName === 'string') && + ('scriptId' in object && (typeof object.scriptId === 'string' || typeof object.scriptId === 'number')) && + ('columnNumber' in object && typeof object.columnNumber === 'number') && + ('lineNumber' in object && typeof object.lineNumber === 'number') && + ('url' in object && typeof object.url === 'string'); +} +export function isTraceEventRunTask(event) { + return event.name === "RunTask" /* KnownEventName.RunTask */; +} +export function isTraceEventAuctionWorkletRunningInProcess(event) { + return event.name === 'AuctionWorkletRunningInProcess'; +} +export function isTraceEventAuctionWorkletDoneWithProcess(event) { + return event.name === 'AuctionWorkletDoneWithProcess'; +} +export function isTraceEventScreenshot(event) { + return event.name === "Screenshot" /* KnownEventName.Screenshot */; +} +const markerTypeGuards = [ + isTraceEventMarkDOMContent, + isTraceEventMarkLoad, + isTraceEventFirstPaint, + isTraceEventFirstContentfulPaint, + isTraceEventLargestContentfulPaintCandidate, + isTraceEventNavigationStart, +]; +export const MarkerName = ['MarkDOMContent', 'MarkLoad', 'firstPaint', 'firstContentfulPaint', 'largestContentfulPaint::Candidate']; +export function isTraceEventMarkerEvent(event) { + return 
markerTypeGuards.some(fn => fn(event)); +} +const pageLoadEventTypeGuards = [ + ...markerTypeGuards, + isTraceEventInteractiveTime, +]; +export function eventIsPageLoadEvent(event) { + return pageLoadEventTypeGuards.some(fn => fn(event)); +} +export function isTraceEventTracingSessionIdForWorker(event) { + return event.name === 'TracingSessionIdForWorker'; +} +export function isTraceEventScheduleStyleInvalidationTracking(event) { + return event.name === "ScheduleStyleInvalidationTracking" /* KnownEventName.ScheduleStyleInvalidationTracking */; +} +export function isTraceEventStyleRecalcInvalidationTracking(event) { + return event.name === "StyleRecalcInvalidationTracking" /* KnownEventName.StyleRecalcInvalidationTracking */; +} +export function isTraceEventStyleInvalidatorInvalidationTracking(event) { + return event.name === "StyleInvalidatorInvalidationTracking" /* KnownEventName.StyleInvalidatorInvalidationTracking */; +} +export function isTraceEventBeginCommitCompositorFrame(event) { + return event.name === "BeginCommitCompositorFrame" /* KnownEventName.BeginCommitCompositorFrame */; +} +export function isTraceEventScheduleStyleRecalculation(event) { + return event.name === "ScheduleStyleRecalculation" /* KnownEventName.ScheduleStyleRecalculation */; +} +export function isTraceEventPipelineReporter(event) { + return event.name === "PipelineReporter" /* KnownEventName.PipelineReporter */; +} +export function isSyntheticEvent(event) { + return 'rawSourceEvent' in event; +} +export function isSyntheticInteractionEvent(event) { + return Boolean('interactionId' in event && event.args?.data && 'beginEvent' in event.args.data && 'endEvent' in event.args.data); +} +export function isSyntheticTraceEntry(event) { + return isTraceEventRendererEvent(event) || isProfileCall(event); +} +export function isTraceEventDrawFrame(event) { + // The extra check for INSTANT here is because in the past DrawFrame events had an ASYNC_NESTABLE_START and ASYNC_NESTABLE_END pair. 
We don't want to support those old events, so we have to check we are dealing with an instant event. + return event.name === "DrawFrame" /* KnownEventName.DrawFrame */ && event.ph === "I" /* Phase.INSTANT */; +} +export function isLegacyTraceEventDrawFrameBegin(event) { + return event.name === "DrawFrame" /* KnownEventName.DrawFrame */ && event.ph === "b" /* Phase.ASYNC_NESTABLE_START */; +} +export function isTraceEventBeginFrame(event) { + // Old traces did not have frameSeqId; but we do not want to support these. + return Boolean(event.name === "BeginFrame" /* KnownEventName.BeginFrame */ && event.args && 'frameSeqId' in event.args); +} +export function isTraceEventDroppedFrame(event) { + // Old traces did not have frameSeqId; but we do not want to support these. + return Boolean(event.name === "DroppedFrame" /* KnownEventName.DroppedFrame */ && event.args && 'frameSeqId' in event.args); +} +export function isTraceEventRequestMainThreadFrame(event) { + return event.name === "RequestMainThreadFrame" /* KnownEventName.RequestMainThreadFrame */; +} +export function isTraceEventBeginMainThreadFrame(event) { + return event.name === "BeginMainThreadFrame" /* KnownEventName.BeginMainThreadFrame */; +} +export function isTraceEventNeedsBeginFrameChanged(event) { + return event.name === "NeedsBeginFrameChanged" /* KnownEventName.NeedsBeginFrameChanged */; +} +export function isTraceEventCommit(event) { + // Old traces did not have frameSeqId; but we do not want to support these. 
+ return Boolean(event.name === "Commit" /* KnownEventName.Commit */ && event.args && 'frameSeqId' in event.args); +} +export function isTraceEventRasterTask(event) { + return event.name === "RasterTask" /* KnownEventName.RasterTask */; +} +export function isTraceEventCompositeLayers(event) { + return event.name === "CompositeLayers" /* KnownEventName.CompositeLayers */; +} +export function isTraceEventActivateLayerTree(event) { + return event.name === "ActivateLayerTree" /* KnownEventName.ActivateLayerTree */; +} +export function isSyntheticInvalidation(event) { + return event.name === 'SyntheticInvalidation'; +} +export function isTraceEventDrawLazyPixelRef(event) { + return event.name === "Draw LazyPixelRef" /* KnownEventName.DrawLazyPixelRef */; +} +export function isTraceEventDecodeLazyPixelRef(event) { + return event.name === "Decode LazyPixelRef" /* KnownEventName.DecodeLazyPixelRef */; +} +export function isTraceEventDecodeImage(event) { + return event.name === "Decode Image" /* KnownEventName.DecodeImage */; +} +export function isTraceEventSelectorStats(event) { + return event.name === "SelectorStats" /* KnownEventName.SelectorStats */; +} +export function isTraceEventUpdateLayoutTree(event) { + return event.name === "UpdateLayoutTree" /* KnownEventName.UpdateLayoutTree */; +} +export function isTraceEventLayout(event) { + return event.name === "Layout" /* KnownEventName.Layout */; +} +export function isTraceEventInvalidateLayout(event) { + return event.name === "InvalidateLayout" /* KnownEventName.InvalidateLayout */; +} +class ProfileIdTag { + #profileIdTag; +} +// eslint-disable-next-line @typescript-eslint/naming-convention +export function ProfileID(value) { + return value; +} +class CallFrameIdTag { + #callFrameIdTag; +} +// eslint-disable-next-line @typescript-eslint/naming-convention +export function CallFrameID(value) { + return value; +} +class ProcessIdTag { + #processIdTag; +} +// eslint-disable-next-line @typescript-eslint/naming-convention 
+export function ProcessID(value) { + return value; +} +class ThreadIdTag { + #threadIdTag; +} +// eslint-disable-next-line @typescript-eslint/naming-convention +export function ThreadID(value) { + return value; +} +class WorkerIdTag { + #workerIdTag; +} +// eslint-disable-next-line @typescript-eslint/naming-convention +export function WorkerId(value) { + return value; +} +export function isTraceEventComplete(event) { + return event.ph === "X" /* Phase.COMPLETE */; +} +export function isTraceEventBegin(event) { + return event.ph === "B" /* Phase.BEGIN */; +} +export function isTraceEventEnd(event) { + return event.ph === "E" /* Phase.END */; +} +export function isTraceEventDispatch(event) { + return event.name === 'EventDispatch'; +} +export function isTraceEventInstant(event) { + return event.ph === "I" /* Phase.INSTANT */; +} +export function isTraceEventRendererEvent(event) { + return isTraceEventInstant(event) || isTraceEventComplete(event); +} +export function isTraceEventFireIdleCallback(event) { + return event.name === 'FireIdleCallback'; +} +export function isTraceEventSchedulePostMessage(event) { + return event.name === "SchedulePostMessage" /* KnownEventName.SchedulePostMessage */; +} +export function isTraceEventHandlePostMessage(event) { + return event.name === "HandlePostMessage" /* KnownEventName.HandlePostMessage */; +} +export function isTraceEventUpdateCounters(event) { + return event.name === 'UpdateCounters'; +} +export function isThreadName(traceEventData) { + return traceEventData.name === "thread_name" /* KnownEventName.ThreadName */; +} +export function isProcessName(traceEventData) { + return traceEventData.name === 'process_name'; +} +export function isTraceEventTracingStartedInBrowser(traceEventData) { + return traceEventData.name === "TracingStartedInBrowser" /* KnownEventName.TracingStartedInBrowser */; +} +export function isTraceEventFrameCommittedInBrowser(traceEventData) { + return traceEventData.name === 'FrameCommittedInBrowser'; +} 
+export function isTraceEventCommitLoad(traceEventData) { + return traceEventData.name === 'CommitLoad'; +} +export function isTraceEventNavigationStart(traceEventData) { + return traceEventData.name === 'navigationStart'; +} +export function isTraceEventAnimation(traceEventData) { + // We've found some rare traces with an Animtation trace event from a different category: https://crbug.com/1472375#comment7 + return traceEventData.name === 'Animation' && traceEventData.cat.includes('devtools.timeline'); +} +export function isTraceEventLayoutShift(traceEventData) { + return traceEventData.name === 'LayoutShift'; +} +export function isTraceEventLayoutInvalidationTracking(traceEventData) { + return traceEventData.name === "LayoutInvalidationTracking" /* KnownEventName.LayoutInvalidationTracking */; +} +export function isTraceEventFirstContentfulPaint(traceEventData) { + return traceEventData.name === 'firstContentfulPaint'; +} +export function isTraceEventLargestContentfulPaintCandidate(traceEventData) { + return traceEventData.name === "largestContentfulPaint::Candidate" /* KnownEventName.MarkLCPCandidate */; +} +export function isTraceEventLargestImagePaintCandidate(traceEventData) { + return traceEventData.name === 'LargestImagePaint::Candidate'; +} +export function isTraceEventLargestTextPaintCandidate(traceEventData) { + return traceEventData.name === 'LargestTextPaint::Candidate'; +} +export function isTraceEventMarkLoad(traceEventData) { + return traceEventData.name === 'MarkLoad'; +} +export function isTraceEventFirstPaint(traceEventData) { + return traceEventData.name === 'firstPaint'; +} +export function isTraceEventMarkDOMContent(traceEventData) { + return traceEventData.name === 'MarkDOMContent'; +} +export function isTraceEventInteractiveTime(traceEventData) { + return traceEventData.name === 'InteractiveTime'; +} +export function isTraceEventEventTiming(traceEventData) { + return traceEventData.name === "EventTiming" /* KnownEventName.EventTiming */; +} 
+export function isTraceEventEventTimingEnd(traceEventData) { + return isTraceEventEventTiming(traceEventData) && traceEventData.ph === "e" /* Phase.ASYNC_NESTABLE_END */; +} +export function isTraceEventEventTimingStart(traceEventData) { + return isTraceEventEventTiming(traceEventData) && traceEventData.ph === "b" /* Phase.ASYNC_NESTABLE_START */; +} +export function isTraceEventGPUTask(traceEventData) { + return traceEventData.name === 'GPUTask'; +} +export function isTraceEventProfile(traceEventData) { + return traceEventData.name === 'Profile'; +} +export function isSyntheticCpuProfile(traceEventData) { + return traceEventData.name === 'CpuProfile'; +} +export function isTraceEventProfileChunk(traceEventData) { + return traceEventData.name === 'ProfileChunk'; +} +export function isTraceEventResourceChangePriority(traceEventData) { + return traceEventData.name === 'ResourceChangePriority'; +} +export function isTraceEventResourceSendRequest(traceEventData) { + return traceEventData.name === 'ResourceSendRequest'; +} +export function isTraceEventResourceReceiveResponse(traceEventData) { + return traceEventData.name === 'ResourceReceiveResponse'; +} +export function isTraceEventResourceMarkAsCached(traceEventData) { + return traceEventData.name === 'ResourceMarkAsCached'; +} +export function isTraceEventResourceFinish(traceEventData) { + return traceEventData.name === 'ResourceFinish'; +} +export function isTraceEventResourceWillSendRequest(traceEventData) { + return traceEventData.name === 'ResourceWillSendRequest'; +} +export function isTraceEventResourceReceivedData(traceEventData) { + return traceEventData.name === 'ResourceReceivedData'; +} +export function isSyntheticNetworkRequestDetailsEvent(traceEventData) { + return traceEventData.name === 'SyntheticNetworkRequest'; +} +export function isTraceEventPrePaint(traceEventData) { + return traceEventData.name === 'PrePaint'; +} +export function isTraceEventNavigationStartWithURL(event) { + return 
Boolean(isTraceEventNavigationStart(event) && event.args.data && event.args.data.documentLoaderURL !== ''); +} +export function isTraceEventMainFrameViewport(traceEventData) { + return traceEventData.name === 'PaintTimingVisualizer::Viewport'; +} +export function isSyntheticUserTiming(traceEventData) { + if (traceEventData.cat !== 'blink.user_timing') { + return false; + } + const data = traceEventData.args?.data; + if (!data) { + return false; + } + return 'beginEvent' in data && 'endEvent' in data; +} +export function isSyntheticConsoleTiming(traceEventData) { + if (traceEventData.cat !== 'blink.console') { + return false; + } + const data = traceEventData.args?.data; + if (!data) { + return false; + } + return 'beginEvent' in data && 'endEvent' in data; +} +export function isTraceEventPerformanceMeasure(traceEventData) { + return traceEventData.cat === 'blink.user_timing' && isTraceEventAsyncPhase(traceEventData); +} +export function isTraceEventPerformanceMark(traceEventData) { + return traceEventData.cat === 'blink.user_timing' && + (traceEventData.ph === "R" /* Phase.MARK */ || traceEventData.ph === "I" /* Phase.INSTANT */); +} +export function isTraceEventConsoleTime(traceEventData) { + return traceEventData.cat === 'blink.console' && isTraceEventAsyncPhase(traceEventData); +} +export function isTraceEventTimeStamp(traceEventData) { + return traceEventData.ph === "I" /* Phase.INSTANT */ && traceEventData.name === 'TimeStamp'; +} +export function isTraceEventParseHTML(traceEventData) { + return traceEventData.name === 'ParseHTML'; +} +const asyncPhases = new Set([ + "b" /* Phase.ASYNC_NESTABLE_START */, + "n" /* Phase.ASYNC_NESTABLE_INSTANT */, + "e" /* Phase.ASYNC_NESTABLE_END */, + "T" /* Phase.ASYNC_STEP_INTO */, + "S" /* Phase.ASYNC_BEGIN */, + "F" /* Phase.ASYNC_END */, + "p" /* Phase.ASYNC_STEP_PAST */, +]); +export function isTraceEventAsyncPhase(traceEventData) { + return asyncPhases.has(traceEventData.ph); +} +export function 
isSyntheticLayoutShift(traceEventData) { + if (!isTraceEventLayoutShift(traceEventData) || !traceEventData.args.data) { + return false; + } + return 'rawEvent' in traceEventData.args.data; +} +export function isProfileCall(event) { + return 'callFrame' in event; +} +export function isTraceEventPaint(event) { + return event.name === "Paint" /* KnownEventName.Paint */; +} +export function isTraceEventPaintImage(event) { + return event.name === "PaintImage" /* KnownEventName.PaintImage */; +} +export function isTraceEventScrollLayer(event) { + return event.name === "ScrollLayer" /* KnownEventName.ScrollLayer */; +} +export function isTraceEventSetLayerId(event) { + return event.name === "SetLayerTreeId" /* KnownEventName.SetLayerTreeId */; +} +export function isTraceEventUpdateLayer(event) { + return event.name === "UpdateLayer" /* KnownEventName.UpdateLayer */; +} +export function isTraceEventDisplayListItemListSnapshot(event) { + return event.name === "cc::DisplayItemList" /* KnownEventName.DisplayItemListSnapshot */; +} +export function isTraceEventLayerTreeHostImplSnapshot(event) { + return event.name === "cc::LayerTreeHostImpl" /* KnownEventName.LayerTreeHostImplSnapshot */; +} +export function isTraceEventFireAnimationFrame(event) { + return event.name === "FireAnimationFrame" /* KnownEventName.FireAnimationFrame */; +} +export function isTraceEventRequestAnimationFrame(event) { + return event.name === "RequestAnimationFrame" /* KnownEventName.RequestAnimationFrame */; +} +export function isTraceEventTimerInstall(event) { + return event.name === "TimerInstall" /* KnownEventName.TimerInstall */; +} +export function isTraceEventTimerFire(event) { + return event.name === "TimerFire" /* KnownEventName.TimerFire */; +} +export function isTraceEventRequestIdleCallback(event) { + return event.name === "RequestIdleCallback" /* KnownEventName.RequestIdleCallback */; +} +export function isTraceEventWebSocketCreate(event) { + return event.name === "WebSocketCreate" /* 
KnownEventName.WebSocketCreate */; +} +export function isTraceEventWebSocketSendHandshakeRequest(event) { + return event.name === "WebSocketSendHandshakeRequest" /* KnownEventName.WebSocketSendHandshakeRequest */; +} +export function isTraceEventWebSocketReceiveHandshakeResponse(event) { + return event.name === "WebSocketReceiveHandshakeResponse" /* KnownEventName.WebSocketReceiveHandshakeResponse */; +} +export function isTraceEventWebSocketDestroy(event) { + return event.name === "WebSocketDestroy" /* KnownEventName.WebSocketDestroy */; +} +export function isWebSocketTraceEvent(event) { + return isTraceEventWebSocketCreate(event) || isTraceEventWebSocketDestroy(event) || + isTraceEventWebSocketReceiveHandshakeResponse(event) || isTraceEventWebSocketSendHandshakeRequest(event); +} +export function isTraceEventV8Compile(event) { + return event.name === "v8.compile" /* KnownEventName.Compile */; +} +export function isTraceEventFunctionCall(event) { + return event.name === "FunctionCall" /* KnownEventName.FunctionCall */; +} +/** + * Generally, before JS is executed, a trace event is dispatched that + * parents the JS calls. These we call "invocation" events. This + * function determines if an event is one of such. + */ +export function isJSInvocationEvent(event) { + switch (event.name) { + case "RunMicrotasks" /* KnownEventName.RunMicrotasks */: + case "FunctionCall" /* KnownEventName.FunctionCall */: + case "EvaluateScript" /* KnownEventName.EvaluateScript */: + case "v8.evaluateModule" /* KnownEventName.EvaluateModule */: + case "EventDispatch" /* KnownEventName.EventDispatch */: + case "V8.Execute" /* KnownEventName.V8Execute */: + return true; + } + // Also consider any new v8 trace events. (eg 'V8.RunMicrotasks' and 'v8.run') + if (event.name.startsWith('v8') || event.name.startsWith('V8')) { + return true; + } + return false; +} +// NOT AN EXHAUSTIVE LIST: just some categories we use and refer +// to in multiple places. 
+export const Categories = { + Console: 'blink.console', + UserTiming: 'blink.user_timing', + Loading: 'loading', +}; +//# sourceMappingURL=TraceEvents.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/bundle-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/types/bundle-tsconfig.json new file mode 100644 index 000000000..8237182aa --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/bundle-tsconfig.json @@ -0,0 +1 @@ +{"compilerOptions":{"composite":true,"outDir":".","baseUrl":".","rootDir":"../../../../../../../front_end/models/trace/types"},"files":["../../../../../../../front_end/models/trace/types/types.ts"],"references":[{"path":"./types-tsconfig.json"}]} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/devtools_entrypoint-bundle-typescript-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/types/devtools_entrypoint-bundle-typescript-tsconfig.json new file mode 100644 index 000000000..53a0a3791 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/devtools_entrypoint-bundle-typescript-tsconfig.json @@ -0,0 +1,43 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/types", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "devtools_entrypoint-bundle-typescript-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + 
"../../../../../../../front_end/models/trace/types/types.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + "../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "./types-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/types-tsconfig.json b/node_modules/@paulirish/trace_engine/models/trace/types/types-tsconfig.json new file mode 100644 index 000000000..c9a1d9cc9 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/types-tsconfig.json @@ -0,0 +1,47 @@ +{ + "compilerOptions": { + "allowJs": true, + "checkJs": true, + "composite": true, + "declaration": true, + "experimentalDecorators": true, + "forceConsistentCasingInFileNames": true, + "inlineSources": true, + "lib": [ + "esnext", + "dom", + "dom.iterable" + ], + "module": "esnext", + "noEmitOnError": true, + "noFallthroughCasesInSwitch": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noUnusedLocals": false, + "outDir": ".", + "rootDir": "../../../../../../../front_end/models/trace/types", + "skipLibCheck": true, + "sourceMap": true, + "strict": true, + "target": "esnext", + "tsBuildInfoFile": "types-tsconfig.json.tsbuildinfo", + "typeRoots": [], + "useUnknownInCatchVariables": false + }, + "files": [ + "../../../../../../../front_end/models/trace/types/Configuration.ts", + "../../../../../../../front_end/models/trace/types/Extensions.ts", + "../../../../../../../front_end/models/trace/types/File.ts", + "../../../../../../../front_end/models/trace/types/Timing.ts", + "../../../../../../../front_end/models/trace/types/TraceEvents.ts", + "../../../../../../../front_end/legacy/legacy-defs.d.ts", + "../../../../../../../front_end/global_typings/global_defs.d.ts", + 
"../../../../../../../front_end/global_typings/request_idle_callback.d.ts", + "../../../../../../../node_modules/@types/filesystem/index.d.ts" + ], + "references": [ + { + "path": "../../../generated/generated-tsconfig.json" + } + ] +} \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/models/trace/types/types.js b/node_modules/@paulirish/trace_engine/models/trace/types/types.js new file mode 100644 index 000000000..6d33dfbef --- /dev/null +++ b/node_modules/@paulirish/trace_engine/models/trace/types/types.js @@ -0,0 +1,9 @@ +// Copyright 2022 The Chromium Authors. All rights reserved. +// Use of this source code is governed by a BSD-style license that can be +// found in the LICENSE file. +export * as Configuration from './Configuration.js'; +export * as Extensions from './Extensions.js'; +export * as File from './File.js'; +export * as Timing from './Timing.js'; +export * as TraceEvents from './TraceEvents.js'; +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/node_modules/@paulirish/trace_engine/package.json b/node_modules/@paulirish/trace_engine/package.json new file mode 100644 index 000000000..f234f9846 --- /dev/null +++ b/node_modules/@paulirish/trace_engine/package.json @@ -0,0 +1,16 @@ +{ + "name": "@paulirish/trace_engine", + "version": "0.0.23", + "description": "", + "main": "models/trace/trace.js", + "scripts": { + "test": "node test/test-trace-engine.mjs", + "prepublishOnly": "npm test" + }, + "type": "module", + "keywords": [], + "author": "", + "license": "BSD-3-Clause", + "dependencies": {}, + "devDependencies": {} +} diff --git a/node_modules/@pkgjs/parseargs/index.js b/node_modules/@pkgjs/parseargs/index.js new file mode 100644 index 000000000..b1004c7b7 --- /dev/null +++ b/node_modules/@pkgjs/parseargs/index.js @@ -0,0 +1,396 @@ +'use strict'; + +const { + ArrayPrototypeForEach, + ArrayPrototypeIncludes, + ArrayPrototypeMap, + ArrayPrototypePush, + ArrayPrototypePushApply, + 
ArrayPrototypeShift, + ArrayPrototypeSlice, + ArrayPrototypeUnshiftApply, + ObjectEntries, + ObjectPrototypeHasOwnProperty: ObjectHasOwn, + StringPrototypeCharAt, + StringPrototypeIndexOf, + StringPrototypeSlice, + StringPrototypeStartsWith, +} = require('./internal/primordials'); + +const { + validateArray, + validateBoolean, + validateBooleanArray, + validateObject, + validateString, + validateStringArray, + validateUnion, +} = require('./internal/validators'); + +const { + kEmptyObject, +} = require('./internal/util'); + +const { + findLongOptionForShort, + isLoneLongOption, + isLoneShortOption, + isLongOptionAndValue, + isOptionValue, + isOptionLikeValue, + isShortOptionAndValue, + isShortOptionGroup, + useDefaultValueOption, + objectGetOwn, + optionsGetOwn, +} = require('./utils'); + +const { + codes: { + ERR_INVALID_ARG_VALUE, + ERR_PARSE_ARGS_INVALID_OPTION_VALUE, + ERR_PARSE_ARGS_UNKNOWN_OPTION, + ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL, + }, +} = require('./internal/errors'); + +function getMainArgs() { + // Work out where to slice process.argv for user supplied arguments. + + // Check node options for scenarios where user CLI args follow executable. + const execArgv = process.execArgv; + if (ArrayPrototypeIncludes(execArgv, '-e') || + ArrayPrototypeIncludes(execArgv, '--eval') || + ArrayPrototypeIncludes(execArgv, '-p') || + ArrayPrototypeIncludes(execArgv, '--print')) { + return ArrayPrototypeSlice(process.argv, 1); + } + + // Normally first two arguments are executable and script, then CLI arguments + return ArrayPrototypeSlice(process.argv, 2); +} + +/** + * In strict mode, throw for possible usage errors like --foo --bar + * + * @param {object} token - from tokens as available from parseArgs + */ +function checkOptionLikeValue(token) { + if (!token.inlineValue && isOptionLikeValue(token.value)) { + // Only show short example if user used short option. + const example = StringPrototypeStartsWith(token.rawName, '--') ? 
+ `'${token.rawName}=-XYZ'` : + `'--${token.name}=-XYZ' or '${token.rawName}-XYZ'`; + const errorMessage = `Option '${token.rawName}' argument is ambiguous. +Did you forget to specify the option argument for '${token.rawName}'? +To specify an option argument starting with a dash use ${example}.`; + throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(errorMessage); + } +} + +/** + * In strict mode, throw for usage errors. + * + * @param {object} config - from config passed to parseArgs + * @param {object} token - from tokens as available from parseArgs + */ +function checkOptionUsage(config, token) { + if (!ObjectHasOwn(config.options, token.name)) { + throw new ERR_PARSE_ARGS_UNKNOWN_OPTION( + token.rawName, config.allowPositionals); + } + + const short = optionsGetOwn(config.options, token.name, 'short'); + const shortAndLong = `${short ? `-${short}, ` : ''}--${token.name}`; + const type = optionsGetOwn(config.options, token.name, 'type'); + if (type === 'string' && typeof token.value !== 'string') { + throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong} ' argument missing`); + } + // (Idiomatic test for undefined||null, expecting undefined.) + if (type === 'boolean' && token.value != null) { + throw new ERR_PARSE_ARGS_INVALID_OPTION_VALUE(`Option '${shortAndLong}' does not take an argument`); + } +} + + +/** + * Store the option value in `values`. + * + * @param {string} longOption - long option name e.g. 'foo' + * @param {string|undefined} optionValue - value from user args + * @param {object} options - option configs, from parseArgs({ options }) + * @param {object} values - option values returned in `values` by parseArgs + */ +function storeOption(longOption, optionValue, options, values) { + if (longOption === '__proto__') { + return; // No. Just no. + } + + // We store based on the option value rather than option type, + // preserving the users intent for author to deal with. + const newValue = optionValue ?? 
true; + if (optionsGetOwn(options, longOption, 'multiple')) { + // Always store value in array, including for boolean. + // values[longOption] starts out not present, + // first value is added as new array [newValue], + // subsequent values are pushed to existing array. + // (note: values has null prototype, so simpler usage) + if (values[longOption]) { + ArrayPrototypePush(values[longOption], newValue); + } else { + values[longOption] = [newValue]; + } + } else { + values[longOption] = newValue; + } +} + +/** + * Store the default option value in `values`. + * + * @param {string} longOption - long option name e.g. 'foo' + * @param {string + * | boolean + * | string[] + * | boolean[]} optionValue - default value from option config + * @param {object} values - option values returned in `values` by parseArgs + */ +function storeDefaultOption(longOption, optionValue, values) { + if (longOption === '__proto__') { + return; // No. Just no. + } + + values[longOption] = optionValue; +} + +/** + * Process args and turn into identified tokens: + * - option (along with value, if any) + * - positional + * - option-terminator + * + * @param {string[]} args - from parseArgs({ args }) or mainArgs + * @param {object} options - option configs, from parseArgs({ options }) + */ +function argsToTokens(args, options) { + const tokens = []; + let index = -1; + let groupCount = 0; + + const remainingArgs = ArrayPrototypeSlice(args); + while (remainingArgs.length > 0) { + const arg = ArrayPrototypeShift(remainingArgs); + const nextArg = remainingArgs[0]; + if (groupCount > 0) + groupCount--; + else + index++; + + // Check if `arg` is an options terminator. + // Guideline 10 in https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html + if (arg === '--') { + // Everything after a bare '--' is considered a positional argument. 
+ ArrayPrototypePush(tokens, { kind: 'option-terminator', index }); + ArrayPrototypePushApply( + tokens, ArrayPrototypeMap(remainingArgs, (arg) => { + return { kind: 'positional', index: ++index, value: arg }; + }) + ); + break; // Finished processing args, leave while loop. + } + + if (isLoneShortOption(arg)) { + // e.g. '-f' + const shortOption = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(shortOption, options); + let value; + let inlineValue; + if (optionsGetOwn(options, longOption, 'type') === 'string' && + isOptionValue(nextArg)) { + // e.g. '-f', 'bar' + value = ArrayPrototypeShift(remainingArgs); + inlineValue = false; + } + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: arg, + index, value, inlineValue }); + if (value != null) ++index; + continue; + } + + if (isShortOptionGroup(arg, options)) { + // Expand -fXzy to -f -X -z -y + const expanded = []; + for (let index = 1; index < arg.length; index++) { + const shortOption = StringPrototypeCharAt(arg, index); + const longOption = findLongOptionForShort(shortOption, options); + if (optionsGetOwn(options, longOption, 'type') !== 'string' || + index === arg.length - 1) { + // Boolean option, or last short in group. Well formed. + ArrayPrototypePush(expanded, `-${shortOption}`); + } else { + // String option in middle. Yuck. + // Expand -abfFILE to -a -b -fFILE + ArrayPrototypePush(expanded, `-${StringPrototypeSlice(arg, index)}`); + break; // finished short group + } + } + ArrayPrototypeUnshiftApply(remainingArgs, expanded); + groupCount = expanded.length; + continue; + } + + if (isShortOptionAndValue(arg, options)) { + // e.g. 
-fFILE + const shortOption = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(shortOption, options); + const value = StringPrototypeSlice(arg, 2); + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: `-${shortOption}`, + index, value, inlineValue: true }); + continue; + } + + if (isLoneLongOption(arg)) { + // e.g. '--foo' + const longOption = StringPrototypeSlice(arg, 2); + let value; + let inlineValue; + if (optionsGetOwn(options, longOption, 'type') === 'string' && + isOptionValue(nextArg)) { + // e.g. '--foo', 'bar' + value = ArrayPrototypeShift(remainingArgs); + inlineValue = false; + } + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: arg, + index, value, inlineValue }); + if (value != null) ++index; + continue; + } + + if (isLongOptionAndValue(arg)) { + // e.g. --foo=bar + const equalIndex = StringPrototypeIndexOf(arg, '='); + const longOption = StringPrototypeSlice(arg, 2, equalIndex); + const value = StringPrototypeSlice(arg, equalIndex + 1); + ArrayPrototypePush( + tokens, + { kind: 'option', name: longOption, rawName: `--${longOption}`, + index, value, inlineValue: true }); + continue; + } + + ArrayPrototypePush(tokens, { kind: 'positional', index, value: arg }); + } + + return tokens; +} + +const parseArgs = (config = kEmptyObject) => { + const args = objectGetOwn(config, 'args') ?? getMainArgs(); + const strict = objectGetOwn(config, 'strict') ?? true; + const allowPositionals = objectGetOwn(config, 'allowPositionals') ?? !strict; + const returnTokens = objectGetOwn(config, 'tokens') ?? false; + const options = objectGetOwn(config, 'options') ?? { __proto__: null }; + // Bundle these up for passing to strict-mode checks. + const parseConfig = { args, strict, options, allowPositionals }; + + // Validate input configuration. 
+ validateArray(args, 'args'); + validateBoolean(strict, 'strict'); + validateBoolean(allowPositionals, 'allowPositionals'); + validateBoolean(returnTokens, 'tokens'); + validateObject(options, 'options'); + ArrayPrototypeForEach( + ObjectEntries(options), + ({ 0: longOption, 1: optionConfig }) => { + validateObject(optionConfig, `options.${longOption}`); + + // type is required + const optionType = objectGetOwn(optionConfig, 'type'); + validateUnion(optionType, `options.${longOption}.type`, ['string', 'boolean']); + + if (ObjectHasOwn(optionConfig, 'short')) { + const shortOption = optionConfig.short; + validateString(shortOption, `options.${longOption}.short`); + if (shortOption.length !== 1) { + throw new ERR_INVALID_ARG_VALUE( + `options.${longOption}.short`, + shortOption, + 'must be a single character' + ); + } + } + + const multipleOption = objectGetOwn(optionConfig, 'multiple'); + if (ObjectHasOwn(optionConfig, 'multiple')) { + validateBoolean(multipleOption, `options.${longOption}.multiple`); + } + + const defaultValue = objectGetOwn(optionConfig, 'default'); + if (defaultValue !== undefined) { + let validator; + switch (optionType) { + case 'string': + validator = multipleOption ? validateStringArray : validateString; + break; + + case 'boolean': + validator = multipleOption ? 
validateBooleanArray : validateBoolean; + break; + } + validator(defaultValue, `options.${longOption}.default`); + } + } + ); + + // Phase 1: identify tokens + const tokens = argsToTokens(args, options); + + // Phase 2: process tokens into parsed option values and positionals + const result = { + values: { __proto__: null }, + positionals: [], + }; + if (returnTokens) { + result.tokens = tokens; + } + ArrayPrototypeForEach(tokens, (token) => { + if (token.kind === 'option') { + if (strict) { + checkOptionUsage(parseConfig, token); + checkOptionLikeValue(token); + } + storeOption(token.name, token.value, options, result.values); + } else if (token.kind === 'positional') { + if (!allowPositionals) { + throw new ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL(token.value); + } + ArrayPrototypePush(result.positionals, token.value); + } + }); + + // Phase 3: fill in default values for missing args + ArrayPrototypeForEach(ObjectEntries(options), ({ 0: longOption, + 1: optionConfig }) => { + const mustSetDefault = useDefaultValueOption(longOption, + optionConfig, + result.values); + if (mustSetDefault) { + storeDefaultOption(longOption, + objectGetOwn(optionConfig, 'default'), + result.values); + } + }); + + + return result; +}; + +module.exports = { + parseArgs, +}; diff --git a/node_modules/@pkgjs/parseargs/internal/errors.js b/node_modules/@pkgjs/parseargs/internal/errors.js new file mode 100644 index 000000000..e1b237b5b --- /dev/null +++ b/node_modules/@pkgjs/parseargs/internal/errors.js @@ -0,0 +1,47 @@ +'use strict'; + +class ERR_INVALID_ARG_TYPE extends TypeError { + constructor(name, expected, actual) { + super(`${name} must be ${expected} got ${actual}`); + this.code = 'ERR_INVALID_ARG_TYPE'; + } +} + +class ERR_INVALID_ARG_VALUE extends TypeError { + constructor(arg1, arg2, expected) { + super(`The property ${arg1} ${expected}. 
Received '${arg2}'`); + this.code = 'ERR_INVALID_ARG_VALUE'; + } +} + +class ERR_PARSE_ARGS_INVALID_OPTION_VALUE extends Error { + constructor(message) { + super(message); + this.code = 'ERR_PARSE_ARGS_INVALID_OPTION_VALUE'; + } +} + +class ERR_PARSE_ARGS_UNKNOWN_OPTION extends Error { + constructor(option, allowPositionals) { + const suggestDashDash = allowPositionals ? `. To specify a positional argument starting with a '-', place it at the end of the command after '--', as in '-- ${JSON.stringify(option)}` : ''; + super(`Unknown option '${option}'${suggestDashDash}`); + this.code = 'ERR_PARSE_ARGS_UNKNOWN_OPTION'; + } +} + +class ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL extends Error { + constructor(positional) { + super(`Unexpected argument '${positional}'. This command does not take positional arguments`); + this.code = 'ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL'; + } +} + +module.exports = { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_PARSE_ARGS_INVALID_OPTION_VALUE, + ERR_PARSE_ARGS_UNKNOWN_OPTION, + ERR_PARSE_ARGS_UNEXPECTED_POSITIONAL, + } +}; diff --git a/node_modules/@pkgjs/parseargs/internal/primordials.js b/node_modules/@pkgjs/parseargs/internal/primordials.js new file mode 100644 index 000000000..63e23ab11 --- /dev/null +++ b/node_modules/@pkgjs/parseargs/internal/primordials.js @@ -0,0 +1,393 @@ +/* +This file is copied from https://github.com/nodejs/node/blob/v14.19.3/lib/internal/per_context/primordials.js +under the following license: + +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +*/ + +'use strict'; + +/* eslint-disable node-core/prefer-primordials */ + +// This file subclasses and stores the JS builtins that come from the VM +// so that Node.js's builtin modules do not need to later look these up from +// the global proxy, which can be mutated by users. + +// Use of primordials have sometimes a dramatic impact on performance, please +// benchmark all changes made in performance-sensitive areas of the codebase. +// See: https://github.com/nodejs/node/pull/38248 + +const primordials = {}; + +const { + defineProperty: ReflectDefineProperty, + getOwnPropertyDescriptor: ReflectGetOwnPropertyDescriptor, + ownKeys: ReflectOwnKeys, +} = Reflect; + +// `uncurryThis` is equivalent to `func => Function.prototype.call.bind(func)`. +// It is using `bind.bind(call)` to avoid using `Function.prototype.bind` +// and `Function.prototype.call` after it may have been mutated by users. 
+const { apply, bind, call } = Function.prototype; +const uncurryThis = bind.bind(call); +primordials.uncurryThis = uncurryThis; + +// `applyBind` is equivalent to `func => Function.prototype.apply.bind(func)`. +// It is using `bind.bind(apply)` to avoid using `Function.prototype.bind` +// and `Function.prototype.apply` after it may have been mutated by users. +const applyBind = bind.bind(apply); +primordials.applyBind = applyBind; + +// Methods that accept a variable number of arguments, and thus it's useful to +// also create `${prefix}${key}Apply`, which uses `Function.prototype.apply`, +// instead of `Function.prototype.call`, and thus doesn't require iterator +// destructuring. +const varargsMethods = [ + // 'ArrayPrototypeConcat' is omitted, because it performs the spread + // on its own for arrays and array-likes with a truthy + // @@isConcatSpreadable symbol property. + 'ArrayOf', + 'ArrayPrototypePush', + 'ArrayPrototypeUnshift', + // 'FunctionPrototypeCall' is omitted, since there's 'ReflectApply' + // and 'FunctionPrototypeApply'. + 'MathHypot', + 'MathMax', + 'MathMin', + 'StringPrototypeConcat', + 'TypedArrayOf', +]; + +function getNewKey(key) { + return typeof key === 'symbol' ? 
+ `Symbol${key.description[7].toUpperCase()}${key.description.slice(8)}` : + `${key[0].toUpperCase()}${key.slice(1)}`; +} + +function copyAccessor(dest, prefix, key, { enumerable, get, set }) { + ReflectDefineProperty(dest, `${prefix}Get${key}`, { + value: uncurryThis(get), + enumerable + }); + if (set !== undefined) { + ReflectDefineProperty(dest, `${prefix}Set${key}`, { + value: uncurryThis(set), + enumerable + }); + } +} + +function copyPropsRenamed(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + // `src` is bound as the `this` so that the static `this` points + // to the object it was defined on, + // e.g.: `ArrayOfApply` gets a `this` of `Array`: + value: applyBind(desc.value, src), + }); + } + } + } +} + +function copyPropsRenamedBound(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const { value } = desc; + if (typeof value === 'function') { + desc.value = value.bind(src); + } + + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + value: applyBind(value, src), + }); + } + } + } +} + +function copyPrototype(src, dest, prefix) { + for (const key of ReflectOwnKeys(src)) { + const newKey = getNewKey(key); + const desc = ReflectGetOwnPropertyDescriptor(src, key); + if ('get' in desc) { + copyAccessor(dest, prefix, newKey, desc); + } else { + const { value } = desc; + if (typeof value === 'function') { + desc.value = 
uncurryThis(value); + } + + const name = `${prefix}${newKey}`; + ReflectDefineProperty(dest, name, desc); + if (varargsMethods.includes(name)) { + ReflectDefineProperty(dest, `${name}Apply`, { + value: applyBind(value), + }); + } + } + } +} + +// Create copies of configurable value properties of the global object +[ + 'Proxy', + 'globalThis', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + primordials[name] = globalThis[name]; +}); + +// Create copies of URI handling functions +[ + decodeURI, + decodeURIComponent, + encodeURI, + encodeURIComponent, +].forEach((fn) => { + primordials[fn.name] = fn; +}); + +// Create copies of the namespace objects +[ + 'JSON', + 'Math', + 'Proxy', + 'Reflect', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + copyPropsRenamed(global[name], primordials, name); +}); + +// Create copies of intrinsic objects +[ + 'Array', + 'ArrayBuffer', + 'BigInt', + 'BigInt64Array', + 'BigUint64Array', + 'Boolean', + 'DataView', + 'Date', + 'Error', + 'EvalError', + 'Float32Array', + 'Float64Array', + 'Function', + 'Int16Array', + 'Int32Array', + 'Int8Array', + 'Map', + 'Number', + 'Object', + 'RangeError', + 'ReferenceError', + 'RegExp', + 'Set', + 'String', + 'Symbol', + 'SyntaxError', + 'TypeError', + 'URIError', + 'Uint16Array', + 'Uint32Array', + 'Uint8Array', + 'Uint8ClampedArray', + 'WeakMap', + 'WeakSet', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + const original = global[name]; + primordials[name] = original; + copyPropsRenamed(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +// Create copies of intrinsic objects that require a valid `this` to call +// static methods. 
+// Refs: https://www.ecma-international.org/ecma-262/#sec-promise.all +[ + 'Promise', +].forEach((name) => { + // eslint-disable-next-line no-restricted-globals + const original = global[name]; + primordials[name] = original; + copyPropsRenamedBound(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +// Create copies of abstract intrinsic objects that are not directly exposed +// on the global object. +// Refs: https://tc39.es/ecma262/#sec-%typedarray%-intrinsic-object +[ + { name: 'TypedArray', original: Reflect.getPrototypeOf(Uint8Array) }, + { name: 'ArrayIterator', original: { + prototype: Reflect.getPrototypeOf(Array.prototype[Symbol.iterator]()), + } }, + { name: 'StringIterator', original: { + prototype: Reflect.getPrototypeOf(String.prototype[Symbol.iterator]()), + } }, +].forEach(({ name, original }) => { + primordials[name] = original; + // The static %TypedArray% methods require a valid `this`, but can't be bound, + // as they need a subclass constructor as the receiver: + copyPrototype(original, primordials, name); + copyPrototype(original.prototype, primordials, `${name}Prototype`); +}); + +/* eslint-enable node-core/prefer-primordials */ + +const { + ArrayPrototypeForEach, + FunctionPrototypeCall, + Map, + ObjectFreeze, + ObjectSetPrototypeOf, + Set, + SymbolIterator, + WeakMap, + WeakSet, +} = primordials; + +// Because these functions are used by `makeSafe`, which is exposed +// on the `primordials` object, it's important to use const references +// to the primordials that they use: +const createSafeIterator = (factory, next) => { + class SafeIterator { + constructor(iterable) { + this._iterator = factory(iterable); + } + next() { + return next(this._iterator); + } + [SymbolIterator]() { + return this; + } + } + ObjectSetPrototypeOf(SafeIterator.prototype, null); + ObjectFreeze(SafeIterator.prototype); + ObjectFreeze(SafeIterator); + return SafeIterator; +}; + +primordials.SafeArrayIterator = 
createSafeIterator( + primordials.ArrayPrototypeSymbolIterator, + primordials.ArrayIteratorPrototypeNext +); +primordials.SafeStringIterator = createSafeIterator( + primordials.StringPrototypeSymbolIterator, + primordials.StringIteratorPrototypeNext +); + +const copyProps = (src, dest) => { + ArrayPrototypeForEach(ReflectOwnKeys(src), (key) => { + if (!ReflectGetOwnPropertyDescriptor(dest, key)) { + ReflectDefineProperty( + dest, + key, + ReflectGetOwnPropertyDescriptor(src, key)); + } + }); +}; + +const makeSafe = (unsafe, safe) => { + if (SymbolIterator in unsafe.prototype) { + const dummy = new unsafe(); + let next; // We can reuse the same `next` method. + + ArrayPrototypeForEach(ReflectOwnKeys(unsafe.prototype), (key) => { + if (!ReflectGetOwnPropertyDescriptor(safe.prototype, key)) { + const desc = ReflectGetOwnPropertyDescriptor(unsafe.prototype, key); + if ( + typeof desc.value === 'function' && + desc.value.length === 0 && + SymbolIterator in (FunctionPrototypeCall(desc.value, dummy) ?? {}) + ) { + const createIterator = uncurryThis(desc.value); + next = next ?? uncurryThis(createIterator(dummy).next); + const SafeIterator = createSafeIterator(createIterator, next); + desc.value = function() { + return new SafeIterator(this); + }; + } + ReflectDefineProperty(safe.prototype, key, desc); + } + }); + } else { + copyProps(unsafe.prototype, safe.prototype); + } + copyProps(unsafe, safe); + + ObjectSetPrototypeOf(safe.prototype, null); + ObjectFreeze(safe.prototype); + ObjectFreeze(safe); + return safe; +}; +primordials.makeSafe = makeSafe; + +// Subclass the constructors because we need to use their prototype +// methods later. +// Defining the `constructor` is necessary here to avoid the default +// constructor which uses the user-mutable `%ArrayIteratorPrototype%.next`. 
+primordials.SafeMap = makeSafe( + Map, + class SafeMap extends Map { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakMap = makeSafe( + WeakMap, + class SafeWeakMap extends WeakMap { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeSet = makeSafe( + Set, + class SafeSet extends Set { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); +primordials.SafeWeakSet = makeSafe( + WeakSet, + class SafeWeakSet extends WeakSet { + constructor(i) { super(i); } // eslint-disable-line no-useless-constructor + } +); + +ObjectSetPrototypeOf(primordials, null); +ObjectFreeze(primordials); + +module.exports = primordials; diff --git a/node_modules/@pkgjs/parseargs/internal/util.js b/node_modules/@pkgjs/parseargs/internal/util.js new file mode 100644 index 000000000..b9b8fe5b8 --- /dev/null +++ b/node_modules/@pkgjs/parseargs/internal/util.js @@ -0,0 +1,14 @@ +'use strict'; + +// This is a placeholder for util.js in node.js land. + +const { + ObjectCreate, + ObjectFreeze, +} = require('./primordials'); + +const kEmptyObject = ObjectFreeze(ObjectCreate(null)); + +module.exports = { + kEmptyObject, +}; diff --git a/node_modules/@pkgjs/parseargs/internal/validators.js b/node_modules/@pkgjs/parseargs/internal/validators.js new file mode 100644 index 000000000..b5ac4fb50 --- /dev/null +++ b/node_modules/@pkgjs/parseargs/internal/validators.js @@ -0,0 +1,89 @@ +'use strict'; + +// This file is a proxy of the original file located at: +// https://github.com/nodejs/node/blob/main/lib/internal/validators.js +// Every addition or modification to this file must be evaluated +// during the PR review. 
+ +const { + ArrayIsArray, + ArrayPrototypeIncludes, + ArrayPrototypeJoin, +} = require('./primordials'); + +const { + codes: { + ERR_INVALID_ARG_TYPE + } +} = require('./errors'); + +function validateString(value, name) { + if (typeof value !== 'string') { + throw new ERR_INVALID_ARG_TYPE(name, 'String', value); + } +} + +function validateUnion(value, name, union) { + if (!ArrayPrototypeIncludes(union, value)) { + throw new ERR_INVALID_ARG_TYPE(name, `('${ArrayPrototypeJoin(union, '|')}')`, value); + } +} + +function validateBoolean(value, name) { + if (typeof value !== 'boolean') { + throw new ERR_INVALID_ARG_TYPE(name, 'Boolean', value); + } +} + +function validateArray(value, name) { + if (!ArrayIsArray(value)) { + throw new ERR_INVALID_ARG_TYPE(name, 'Array', value); + } +} + +function validateStringArray(value, name) { + validateArray(value, name); + for (let i = 0; i < value.length; i++) { + validateString(value[i], `${name}[${i}]`); + } +} + +function validateBooleanArray(value, name) { + validateArray(value, name); + for (let i = 0; i < value.length; i++) { + validateBoolean(value[i], `${name}[${i}]`); + } +} + +/** + * @param {unknown} value + * @param {string} name + * @param {{ + * allowArray?: boolean, + * allowFunction?: boolean, + * nullable?: boolean + * }} [options] + */ +function validateObject(value, name, options) { + const useDefaultOptions = options == null; + const allowArray = useDefaultOptions ? false : options.allowArray; + const allowFunction = useDefaultOptions ? false : options.allowFunction; + const nullable = useDefaultOptions ? 
false : options.nullable; + if ((!nullable && value === null) || + (!allowArray && ArrayIsArray(value)) || + (typeof value !== 'object' && ( + !allowFunction || typeof value !== 'function' + ))) { + throw new ERR_INVALID_ARG_TYPE(name, 'Object', value); + } +} + +module.exports = { + validateArray, + validateObject, + validateString, + validateStringArray, + validateUnion, + validateBoolean, + validateBooleanArray, +}; diff --git a/node_modules/@pkgjs/parseargs/package.json b/node_modules/@pkgjs/parseargs/package.json new file mode 100644 index 000000000..0bcc05c0d --- /dev/null +++ b/node_modules/@pkgjs/parseargs/package.json @@ -0,0 +1,36 @@ +{ + "name": "@pkgjs/parseargs", + "version": "0.11.0", + "description": "Polyfill of future proposal for `util.parseArgs()`", + "engines": { + "node": ">=14" + }, + "main": "index.js", + "exports": { + ".": "./index.js", + "./package.json": "./package.json" + }, + "scripts": { + "coverage": "c8 --check-coverage tape 'test/*.js'", + "test": "c8 tape 'test/*.js'", + "posttest": "eslint .", + "fix": "npm run posttest -- --fix" + }, + "repository": { + "type": "git", + "url": "git@github.com:pkgjs/parseargs.git" + }, + "keywords": [], + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/pkgjs/parseargs/issues" + }, + "homepage": "https://github.com/pkgjs/parseargs#readme", + "devDependencies": { + "c8": "^7.10.0", + "eslint": "^8.2.0", + "eslint-plugin-node-core": "iansu/eslint-plugin-node-core", + "tape": "^5.2.2" + } +} diff --git a/node_modules/@pkgjs/parseargs/utils.js b/node_modules/@pkgjs/parseargs/utils.js new file mode 100644 index 000000000..d7f420a23 --- /dev/null +++ b/node_modules/@pkgjs/parseargs/utils.js @@ -0,0 +1,198 @@ +'use strict'; + +const { + ArrayPrototypeFind, + ObjectEntries, + ObjectPrototypeHasOwnProperty: ObjectHasOwn, + StringPrototypeCharAt, + StringPrototypeIncludes, + StringPrototypeStartsWith, +} = require('./internal/primordials'); + +const { + validateObject, +} = 
require('./internal/validators'); + +// These are internal utilities to make the parsing logic easier to read, and +// add lots of detail for the curious. They are in a separate file to allow +// unit testing, although that is not essential (this could be rolled into +// main file and just tested implicitly via API). +// +// These routines are for internal use, not for export to client. + +/** + * Return the named property, but only if it is an own property. + */ +function objectGetOwn(obj, prop) { + if (ObjectHasOwn(obj, prop)) + return obj[prop]; +} + +/** + * Return the named options property, but only if it is an own property. + */ +function optionsGetOwn(options, longOption, prop) { + if (ObjectHasOwn(options, longOption)) + return objectGetOwn(options[longOption], prop); +} + +/** + * Determines if the argument may be used as an option value. + * @example + * isOptionValue('V') // returns true + * isOptionValue('-v') // returns true (greedy) + * isOptionValue('--foo') // returns true (greedy) + * isOptionValue(undefined) // returns false + */ +function isOptionValue(value) { + if (value == null) return false; + + // Open Group Utility Conventions are that an option-argument + // is the argument after the option, and may start with a dash. + return true; // greedy! +} + +/** + * Detect whether there is possible confusion and user may have omitted + * the option argument, like `--port --verbose` when `port` of type:string. + * In strict mode we throw errors if value is option-like. + */ +function isOptionLikeValue(value) { + if (value == null) return false; + + return value.length > 1 && StringPrototypeCharAt(value, 0) === '-'; +} + +/** + * Determines if `arg` is just a short option. + * @example '-f' + */ +function isLoneShortOption(arg) { + return arg.length === 2 && + StringPrototypeCharAt(arg, 0) === '-' && + StringPrototypeCharAt(arg, 1) !== '-'; +} + +/** + * Determines if `arg` is a lone long option. 
+ * @example + * isLoneLongOption('a') // returns false + * isLoneLongOption('-a') // returns false + * isLoneLongOption('--foo') // returns true + * isLoneLongOption('--foo=bar') // returns false + */ +function isLoneLongOption(arg) { + return arg.length > 2 && + StringPrototypeStartsWith(arg, '--') && + !StringPrototypeIncludes(arg, '=', 3); +} + +/** + * Determines if `arg` is a long option and value in the same argument. + * @example + * isLongOptionAndValue('--foo') // returns false + * isLongOptionAndValue('--foo=bar') // returns true + */ +function isLongOptionAndValue(arg) { + return arg.length > 2 && + StringPrototypeStartsWith(arg, '--') && + StringPrototypeIncludes(arg, '=', 3); +} + +/** + * Determines if `arg` is a short option group. + * + * See Guideline 5 of the [Open Group Utility Conventions](https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap12.html). + * One or more options without option-arguments, followed by at most one + * option that takes an option-argument, should be accepted when grouped + * behind one '-' delimiter. 
+ * @example + * isShortOptionGroup('-a', {}) // returns false + * isShortOptionGroup('-ab', {}) // returns true + * // -fb is an option and a value, not a short option group + * isShortOptionGroup('-fb', { + * options: { f: { type: 'string' } } + * }) // returns false + * isShortOptionGroup('-bf', { + * options: { f: { type: 'string' } } + * }) // returns true + * // -bfb is an edge case, return true and caller sorts it out + * isShortOptionGroup('-bfb', { + * options: { f: { type: 'string' } } + * }) // returns true + */ +function isShortOptionGroup(arg, options) { + if (arg.length <= 2) return false; + if (StringPrototypeCharAt(arg, 0) !== '-') return false; + if (StringPrototypeCharAt(arg, 1) === '-') return false; + + const firstShort = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(firstShort, options); + return optionsGetOwn(options, longOption, 'type') !== 'string'; +} + +/** + * Determine if arg is a short string option followed by its value. + * @example + * isShortOptionAndValue('-a', {}); // returns false + * isShortOptionAndValue('-ab', {}); // returns false + * isShortOptionAndValue('-fFILE', { + * options: { foo: { short: 'f', type: 'string' }} + * }) // returns true + */ +function isShortOptionAndValue(arg, options) { + validateObject(options, 'options'); + + if (arg.length <= 2) return false; + if (StringPrototypeCharAt(arg, 0) !== '-') return false; + if (StringPrototypeCharAt(arg, 1) === '-') return false; + + const shortOption = StringPrototypeCharAt(arg, 1); + const longOption = findLongOptionForShort(shortOption, options); + return optionsGetOwn(options, longOption, 'type') === 'string'; +} + +/** + * Find the long option associated with a short option. Looks for a configured + * `short` and returns the short option itself if a long option is not found. 
+ * @example + * findLongOptionForShort('a', {}) // returns 'a' + * findLongOptionForShort('b', { + * options: { bar: { short: 'b' } } + * }) // returns 'bar' + */ +function findLongOptionForShort(shortOption, options) { + validateObject(options, 'options'); + const longOptionEntry = ArrayPrototypeFind( + ObjectEntries(options), + ({ 1: optionConfig }) => objectGetOwn(optionConfig, 'short') === shortOption + ); + return longOptionEntry?.[0] ?? shortOption; +} + +/** + * Check if the given option includes a default value + * and that option has not been set by the input args. + * + * @param {string} longOption - long option name e.g. 'foo' + * @param {object} optionConfig - the option configuration properties + * @param {object} values - option values returned in `values` by parseArgs + */ +function useDefaultValueOption(longOption, optionConfig, values) { + return objectGetOwn(optionConfig, 'default') !== undefined && + values[longOption] === undefined; +} + +module.exports = { + findLongOptionForShort, + isLoneLongOption, + isLoneShortOption, + isLongOptionAndValue, + isOptionValue, + isOptionLikeValue, + isShortOptionAndValue, + isShortOptionGroup, + useDefaultValueOption, + objectGetOwn, + optionsGetOwn, +}; diff --git a/node_modules/@puppeteer/browsers/lib/cjs/CLI.js b/node_modules/@puppeteer/browsers/lib/cjs/CLI.js index 0afcf2d79..21cc6d42f 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/CLI.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/CLI.js @@ -132,6 +132,9 @@ class CLI { }); yargs.example('$0 install chrome', `Install the ${latestOrPinned} available build of the Chrome browser.`); yargs.example('$0 install chrome@latest', 'Install the latest available build for the Chrome browser.'); + yargs.example('$0 install chrome@stable', 'Install the latest available build for the Chrome browser from the stable channel.'); + yargs.example('$0 install chrome@beta', 'Install the latest available build for the Chrome browser from the beta channel.'); + 
yargs.example('$0 install chrome@dev', 'Install the latest available build for the Chrome browser from the dev channel.'); yargs.example('$0 install chrome@canary', 'Install the latest available build for the Chrome Canary browser.'); yargs.example('$0 install chrome@115', 'Install the latest available build for Chrome 115.'); yargs.example('$0 install chromedriver@canary', 'Install the latest available build for ChromeDriver Canary.'); @@ -141,7 +144,13 @@ class CLI { yargs.example('$0 install chrome-headless-shell@beta', 'Install the latest available chrome-headless-shell build corresponding to the Beta channel.'); yargs.example('$0 install chrome-headless-shell@118', 'Install the latest available chrome-headless-shell 118 build.'); yargs.example('$0 install chromium@1083080', 'Install the revision 1083080 of the Chromium browser.'); - yargs.example('$0 install firefox', 'Install the latest available build of the Firefox browser.'); + yargs.example('$0 install firefox', 'Install the latest nightly available build of the Firefox browser.'); + yargs.example('$0 install firefox@stable', 'Install the latest stable build of the Firefox browser.'); + yargs.example('$0 install firefox@beta', 'Install the latest beta build of the Firefox browser.'); + yargs.example('$0 install firefox@devedition', 'Install the latest devedition build of the Firefox browser.'); + yargs.example('$0 install firefox@esr', 'Install the latest ESR build of the Firefox browser.'); + yargs.example('$0 install firefox@nightly', 'Install the latest nightly build of the Firefox browser.'); + yargs.example('$0 install firefox@stable_111.0.1', 'Install a specific version of the Firefox browser.'); yargs.example('$0 install firefox --platform mac', 'Install the latest Mac (Intel) build of the Firefox browser.'); if (this.#allowCachePathOverride) { yargs.example('$0 install firefox --path /tmp/my-browser-cache', 'Install to the specified cache directory.'); @@ -159,6 +168,7 @@ class CLI { } 
args.browser.buildId = pinnedVersion; } + const originalBuildId = args.browser.buildId; args.browser.buildId = await (0, browser_data_js_1.resolveBuildId)(args.browser.name, args.platform, args.browser.buildId); await (0, install_js_1.install)({ browser: args.browser.name, @@ -167,6 +177,9 @@ class CLI { cacheDir: args.path ?? this.#cachePath, downloadProgressCallback: makeProgressCallback(args.browser.name, args.browser.buildId), baseUrl: args.baseUrl, + buildIdAlias: originalBuildId !== args.browser.buildId + ? originalBuildId + : undefined, }); console.log(`${args.browser.name}@${args.browser.buildId} ${(0, launch_js_1.computeExecutablePath)({ browser: args.browser.name, @@ -256,7 +269,7 @@ function makeProgressCallback(browser, buildId) { let lastDownloadedBytes = 0; return (downloadedBytes, totalBytes) => { if (!progressBar) { - progressBar = new progress_1.default(`Downloading ${browser} r${buildId} - ${toMegabytes(totalBytes)} [:bar] :percent :etas `, { + progressBar = new progress_1.default(`Downloading ${browser} ${buildId} - ${toMegabytes(totalBytes)} [:bar] :percent :etas `, { complete: '=', incomplete: ' ', width: 20, diff --git a/node_modules/@puppeteer/browsers/lib/cjs/Cache.js b/node_modules/@puppeteer/browsers/lib/cjs/Cache.js index d0512c415..a5eee16ef 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/Cache.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/Cache.js @@ -12,8 +12,10 @@ exports.Cache = exports.InstalledBrowser = void 0; const fs_1 = __importDefault(require("fs")); const os_1 = __importDefault(require("os")); const path_1 = __importDefault(require("path")); +const debug_1 = __importDefault(require("debug")); const browser_data_js_1 = require("./browser-data/browser-data.js"); const detectPlatform_js_1 = require("./detectPlatform.js"); +const debugCache = (0, debug_1.default)('puppeteer:browsers:cache'); /** * @public */ @@ -44,6 +46,12 @@ class InstalledBrowser { get path() { return this.#cache.installationDir(this.browser, 
this.platform, this.buildId); } + readMetadata() { + return this.#cache.readMetadata(this.browser); + } + writeMetadata(metadata) { + this.#cache.writeMetadata(this.browser, metadata); + } } exports.InstalledBrowser = InstalledBrowser; /** @@ -74,6 +82,35 @@ class Cache { browserRoot(browser) { return path_1.default.join(this.#rootDir, browser); } + metadataFile(browser) { + return path_1.default.join(this.browserRoot(browser), '.metadata'); + } + readMetadata(browser) { + const metatadaPath = this.metadataFile(browser); + if (!fs_1.default.existsSync(metatadaPath)) { + return { aliases: {} }; + } + // TODO: add type-safe parsing. + const data = JSON.parse(fs_1.default.readFileSync(metatadaPath, 'utf8')); + if (typeof data !== 'object') { + throw new Error('.metadata is not an object'); + } + return data; + } + writeMetadata(browser, metadata) { + const metatadaPath = this.metadataFile(browser); + fs_1.default.mkdirSync(path_1.default.dirname(metatadaPath), { recursive: true }); + fs_1.default.writeFileSync(metatadaPath, JSON.stringify(metadata, null, 2)); + } + resolveAlias(browser, alias) { + const metadata = this.readMetadata(browser); + if (alias === 'latest') { + return Object.values(metadata.aliases || {}) + .sort((0, browser_data_js_1.getVersionComparator)(browser)) + .at(-1); + } + return metadata.aliases[alias]; + } installationDir(browser, platform, buildId) { return path_1.default.join(this.browserRoot(browser), `${platform}-${buildId}`); } @@ -86,6 +123,12 @@ class Cache { }); } uninstall(browser, platform, buildId) { + const metadata = this.readMetadata(browser); + for (const alias of Object.keys(metadata.aliases)) { + if (metadata.aliases[alias] === buildId) { + delete metadata.aliases[alias]; + } + } fs_1.default.rmSync(this.installationDir(browser, platform, buildId), { force: true, recursive: true, @@ -121,6 +164,13 @@ class Cache { if (!options.platform) { throw new Error(`Cannot download a binary for the provided platform: 
${os_1.default.platform()} (${os_1.default.arch()})`); } + try { + options.buildId = + this.resolveAlias(options.browser, options.buildId) ?? options.buildId; + } + catch { + debugCache('could not read .metadata file for the browser'); + } const installationDir = this.installationDir(options.browser, options.platform, options.buildId); return path_1.default.join(installationDir, browser_data_js_1.executablePathByBrowser[options.browser](options.platform, options.buildId)); } diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/browser-data.js b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/browser-data.js index 9f02fafe7..c25b2e322 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/browser-data.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/browser-data.js @@ -28,7 +28,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolveSystemExecutablePath = exports.createProfile = exports.resolveBuildId = exports.ChromeReleaseChannel = exports.BrowserPlatform = exports.Browser = exports.executablePathByBrowser = exports.downloadPaths = exports.downloadUrls = void 0; +exports.getVersionComparator = exports.resolveSystemExecutablePath = exports.createProfile = exports.resolveBuildId = exports.ChromeReleaseChannel = exports.BrowserPlatform = exports.Browser = exports.versionComparators = exports.executablePathByBrowser = exports.downloadPaths = exports.downloadUrls = void 0; const chromeHeadlessShell = __importStar(require("./chrome-headless-shell.js")); const chrome = __importStar(require("./chrome.js")); const chromedriver = __importStar(require("./chromedriver.js")); @@ -59,20 +59,35 @@ exports.executablePathByBrowser = { [types_js_1.Browser.CHROMIUM]: chromium.relativeExecutablePath, [types_js_1.Browser.FIREFOX]: firefox.relativeExecutablePath, }; +exports.versionComparators = { + 
[types_js_1.Browser.CHROMEDRIVER]: chromedriver.compareVersions, + [types_js_1.Browser.CHROMEHEADLESSSHELL]: chromeHeadlessShell.compareVersions, + [types_js_1.Browser.CHROME]: chrome.compareVersions, + [types_js_1.Browser.CHROMIUM]: chromium.compareVersions, + [types_js_1.Browser.FIREFOX]: firefox.compareVersions, +}; /** - * @public + * @internal */ -async function resolveBuildId(browser, platform, tag) { +async function resolveBuildIdForBrowserTag(browser, platform, tag) { switch (browser) { case types_js_1.Browser.FIREFOX: switch (tag) { case types_js_1.BrowserTag.LATEST: - return await firefox.resolveBuildId('FIREFOX_NIGHTLY'); + return await firefox.resolveBuildId(firefox.FirefoxChannel.NIGHTLY); case types_js_1.BrowserTag.BETA: + return await firefox.resolveBuildId(firefox.FirefoxChannel.BETA); + case types_js_1.BrowserTag.NIGHTLY: + return await firefox.resolveBuildId(firefox.FirefoxChannel.NIGHTLY); + case types_js_1.BrowserTag.DEVEDITION: + return await firefox.resolveBuildId(firefox.FirefoxChannel.DEVEDITION); + case types_js_1.BrowserTag.STABLE: + return await firefox.resolveBuildId(firefox.FirefoxChannel.STABLE); + case types_js_1.BrowserTag.ESR: + return await firefox.resolveBuildId(firefox.FirefoxChannel.ESR); case types_js_1.BrowserTag.CANARY: case types_js_1.BrowserTag.DEV: - case types_js_1.BrowserTag.STABLE: - throw new Error(`${tag} is not supported for ${browser}. 
Use 'latest' instead.`); + throw new Error(`${tag.toUpperCase()} is not available for Firefox`); } case types_js_1.Browser.CHROME: { switch (tag) { @@ -86,13 +101,11 @@ async function resolveBuildId(browser, platform, tag) { return await chrome.resolveBuildId(types_js_1.ChromeReleaseChannel.DEV); case types_js_1.BrowserTag.STABLE: return await chrome.resolveBuildId(types_js_1.ChromeReleaseChannel.STABLE); - default: - const result = await chrome.resolveBuildId(tag); - if (result) { - return result; - } + case types_js_1.BrowserTag.NIGHTLY: + case types_js_1.BrowserTag.DEVEDITION: + case types_js_1.BrowserTag.ESR: + throw new Error(`${tag.toUpperCase()} is not available for Chrome`); } - return tag; } case types_js_1.Browser.CHROMEDRIVER: { switch (tag) { @@ -105,13 +118,11 @@ async function resolveBuildId(browser, platform, tag) { return await chromedriver.resolveBuildId(types_js_1.ChromeReleaseChannel.DEV); case types_js_1.BrowserTag.STABLE: return await chromedriver.resolveBuildId(types_js_1.ChromeReleaseChannel.STABLE); - default: - const result = await chromedriver.resolveBuildId(tag); - if (result) { - return result; - } + case types_js_1.BrowserTag.NIGHTLY: + case types_js_1.BrowserTag.DEVEDITION: + case types_js_1.BrowserTag.ESR: + throw new Error(`${tag.toUpperCase()} is not available for ChromeDriver`); } - return tag; } case types_js_1.Browser.CHROMEHEADLESSSHELL: { switch (tag) { @@ -124,27 +135,59 @@ async function resolveBuildId(browser, platform, tag) { return await chromeHeadlessShell.resolveBuildId(types_js_1.ChromeReleaseChannel.DEV); case types_js_1.BrowserTag.STABLE: return await chromeHeadlessShell.resolveBuildId(types_js_1.ChromeReleaseChannel.STABLE); - default: - const result = await chromeHeadlessShell.resolveBuildId(tag); - if (result) { - return result; - } + case types_js_1.BrowserTag.NIGHTLY: + case types_js_1.BrowserTag.DEVEDITION: + case types_js_1.BrowserTag.ESR: + throw new Error(`${tag} is not available for chrome-headless-shell`); 
} - return tag; } case types_js_1.Browser.CHROMIUM: switch (tag) { case types_js_1.BrowserTag.LATEST: return await chromium.resolveBuildId(platform); - case types_js_1.BrowserTag.BETA: + case types_js_1.BrowserTag.NIGHTLY: case types_js_1.BrowserTag.CANARY: case types_js_1.BrowserTag.DEV: + case types_js_1.BrowserTag.DEVEDITION: + case types_js_1.BrowserTag.BETA: case types_js_1.BrowserTag.STABLE: - throw new Error(`${tag} is not supported for ${browser}. Use 'latest' instead.`); + case types_js_1.BrowserTag.ESR: + throw new Error(`${tag} is not supported for Chromium. Use 'latest' instead.`); } } - // We assume the tag is the buildId if it didn't match any keywords. - return tag; +} +/** + * @public + */ +async function resolveBuildId(browser, platform, tag) { + const browserTag = tag; + if (Object.values(types_js_1.BrowserTag).includes(browserTag)) { + return await resolveBuildIdForBrowserTag(browser, platform, browserTag); + } + switch (browser) { + case types_js_1.Browser.FIREFOX: + return tag; + case types_js_1.Browser.CHROME: + const chromeResult = await chrome.resolveBuildId(tag); + if (chromeResult) { + return chromeResult; + } + return tag; + case types_js_1.Browser.CHROMEDRIVER: + const chromeDriverResult = await chromedriver.resolveBuildId(tag); + if (chromeDriverResult) { + return chromeDriverResult; + } + return tag; + case types_js_1.Browser.CHROMEHEADLESSSHELL: + const chromeHeadlessShellResult = await chromeHeadlessShell.resolveBuildId(tag); + if (chromeHeadlessShellResult) { + return chromeHeadlessShellResult; + } + return tag; + case types_js_1.Browser.CHROMIUM: + return tag; + } } exports.resolveBuildId = resolveBuildId; /** @@ -175,4 +218,14 @@ function resolveSystemExecutablePath(browser, platform, channel) { } } exports.resolveSystemExecutablePath = resolveSystemExecutablePath; +/** + * Returns a version comparator for the given browser that can be used to sort + * browser versions. 
+ * + * @public + */ +function getVersionComparator(browser) { + return exports.versionComparators[browser]; +} +exports.getVersionComparator = getVersionComparator; //# sourceMappingURL=browser-data.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome-headless-shell.js b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome-headless-shell.js index d6f2136af..2add8432c 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome-headless-shell.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome-headless-shell.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; +exports.compareVersions = exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; /** * @license * Copyright 2023 Google Inc. 
@@ -25,7 +25,7 @@ function folder(platform) { return 'win64'; } } -function resolveDownloadUrl(platform, buildId, baseUrl = 'https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing') { +function resolveDownloadUrl(platform, buildId, baseUrl = 'https://storage.googleapis.com/chrome-for-testing-public') { return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; } exports.resolveDownloadUrl = resolveDownloadUrl; @@ -52,4 +52,5 @@ function relativeExecutablePath(platform, _buildId) { exports.relativeExecutablePath = relativeExecutablePath; var chrome_js_1 = require("./chrome.js"); Object.defineProperty(exports, "resolveBuildId", { enumerable: true, get: function () { return chrome_js_1.resolveBuildId; } }); +Object.defineProperty(exports, "compareVersions", { enumerable: true, get: function () { return chrome_js_1.compareVersions; } }); //# sourceMappingURL=chrome-headless-shell.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome.js b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome.js index b642a7c51..30b67ddba 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chrome.js @@ -8,8 +8,9 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolveSystemExecutablePath = exports.resolveBuildId = exports.getLastKnownGoodReleaseForBuild = exports.getLastKnownGoodReleaseForMilestone = exports.getLastKnownGoodReleaseForChannel = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; +exports.compareVersions = exports.resolveSystemExecutablePath = exports.resolveBuildId = exports.getLastKnownGoodReleaseForBuild = exports.getLastKnownGoodReleaseForMilestone = exports.getLastKnownGoodReleaseForChannel = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; const path_1 = __importDefault(require("path")); +const semver_1 = __importDefault(require("semver")); const httpUtil_js_1 = require("../httpUtil.js"); const types_js_1 = require("./types.js"); function folder(platform) { @@ -26,7 +27,7 @@ function folder(platform) { return 'win64'; } } -function resolveDownloadUrl(platform, buildId, baseUrl = 'https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing') { +function resolveDownloadUrl(platform, buildId, baseUrl = 'https://storage.googleapis.com/chrome-for-testing-public') { return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; } exports.resolveDownloadUrl = resolveDownloadUrl; @@ -124,4 +125,22 @@ function resolveSystemExecutablePath(platform, channel) { throw new Error(`Unable to detect browser executable path for '${channel}' on ${platform}.`); } exports.resolveSystemExecutablePath = resolveSystemExecutablePath; +function compareVersions(a, b) { + if (!semver_1.default.valid(a)) { + throw new Error(`Version ${a} is not a valid semver version`); + } + if (!semver_1.default.valid(b)) { + throw new Error(`Version ${b} is not a valid semver version`); + } + if (semver_1.default.gt(a, b)) { + return 1; + } + else if (semver_1.default.lt(a, b)) { + return -1; + } + else { + return 0; + } +} 
+exports.compareVersions = compareVersions; //# sourceMappingURL=chrome.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromedriver.js b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromedriver.js index d3f2bd512..aa6fe18c9 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromedriver.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromedriver.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; +exports.compareVersions = exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; /** * @license * Copyright 2023 Google Inc. @@ -25,7 +25,7 @@ function folder(platform) { return 'win64'; } } -function resolveDownloadUrl(platform, buildId, baseUrl = 'https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing') { +function resolveDownloadUrl(platform, buildId, baseUrl = 'https://storage.googleapis.com/chrome-for-testing-public') { return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; } exports.resolveDownloadUrl = resolveDownloadUrl; @@ -48,4 +48,5 @@ function relativeExecutablePath(platform, _buildId) { exports.relativeExecutablePath = relativeExecutablePath; var chrome_js_1 = require("./chrome.js"); Object.defineProperty(exports, "resolveBuildId", { enumerable: true, get: function () { return chrome_js_1.resolveBuildId; } }); +Object.defineProperty(exports, "compareVersions", { enumerable: true, get: function () { return chrome_js_1.compareVersions; } }); //# sourceMappingURL=chromedriver.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromium.js 
b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromium.js index 86b38ac66..e9faeafaa 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromium.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/chromium.js @@ -8,7 +8,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; +exports.compareVersions = exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; const path_1 = __importDefault(require("path")); const httpUtil_js_1 = require("../httpUtil.js"); const types_js_1 = require("./types.js"); @@ -64,4 +64,8 @@ async function resolveBuildId(platform) { return await (0, httpUtil_js_1.getText)(new URL(`https://storage.googleapis.com/chromium-browser-snapshots/${folder(platform)}/LAST_CHANGE`)); } exports.resolveBuildId = resolveBuildId; +function compareVersions(a, b) { + return Number(a) - Number(b); +} +exports.compareVersions = compareVersions; //# sourceMappingURL=chromium.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/firefox.js b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/firefox.js index 51202dfe2..63b3738fe 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/firefox.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/firefox.js @@ -8,12 +8,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.createProfile = exports.resolveBuildId = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; +exports.compareVersions = exports.createProfile = exports.resolveBuildId = exports.FirefoxChannel = exports.relativeExecutablePath = exports.resolveDownloadPath = exports.resolveDownloadUrl = void 0; const fs_1 = __importDefault(require("fs")); const path_1 = __importDefault(require("path")); const httpUtil_js_1 = require("../httpUtil.js"); const types_js_1 = require("./types.js"); -function archive(platform, buildId) { +function archiveNightly(platform, buildId) { switch (platform) { case types_js_1.BrowserPlatform.LINUX: return `firefox-${buildId}.en-US.${platform}-x86_64.tar.bz2`; @@ -25,34 +25,124 @@ function archive(platform, buildId) { return `firefox-${buildId}.en-US.${platform}.zip`; } } -function resolveDownloadUrl(platform, buildId, baseUrl = 'https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central') { - return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; -} -exports.resolveDownloadUrl = resolveDownloadUrl; -function resolveDownloadPath(platform, buildId) { - return [archive(platform, buildId)]; -} -exports.resolveDownloadPath = resolveDownloadPath; -function relativeExecutablePath(platform, _buildId) { +function archive(platform, buildId) { switch (platform) { + case types_js_1.BrowserPlatform.LINUX: + return `firefox-${buildId}.tar.bz2`; case types_js_1.BrowserPlatform.MAC_ARM: case types_js_1.BrowserPlatform.MAC: - return path_1.default.join('Firefox Nightly.app', 'Contents', 'MacOS', 'firefox'); + return `Firefox ${buildId}.dmg`; + case types_js_1.BrowserPlatform.WIN32: + case types_js_1.BrowserPlatform.WIN64: + return `Firefox Setup ${buildId}.exe`; + } +} +function platformName(platform) { + switch (platform) { case types_js_1.BrowserPlatform.LINUX: - return 
path_1.default.join('firefox', 'firefox'); + return `linux-x86_64`; + case types_js_1.BrowserPlatform.MAC_ARM: + case types_js_1.BrowserPlatform.MAC: + return `mac`; case types_js_1.BrowserPlatform.WIN32: case types_js_1.BrowserPlatform.WIN64: - return path_1.default.join('firefox', 'firefox.exe'); + return platform; + } +} +function parseBuildId(buildId) { + for (const value of Object.values(FirefoxChannel)) { + if (buildId.startsWith(value + '_')) { + buildId = buildId.substring(value.length + 1); + return [value, buildId]; + } + } + // Older versions do not have channel as the prefix. + return [FirefoxChannel.NIGHTLY, buildId]; +} +function resolveDownloadUrl(platform, buildId, baseUrl) { + const [channel, resolvedBuildId] = parseBuildId(buildId); + switch (channel) { + case FirefoxChannel.NIGHTLY: + baseUrl ??= + 'https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central'; + break; + case FirefoxChannel.DEVEDITION: + baseUrl ??= 'https://archive.mozilla.org/pub/devedition/releases'; + break; + case FirefoxChannel.BETA: + case FirefoxChannel.STABLE: + case FirefoxChannel.ESR: + baseUrl ??= 'https://archive.mozilla.org/pub/firefox/releases'; + break; + } + switch (channel) { + case FirefoxChannel.NIGHTLY: + return `${baseUrl}/${resolveDownloadPath(platform, resolvedBuildId).join('/')}`; + case FirefoxChannel.DEVEDITION: + case FirefoxChannel.BETA: + case FirefoxChannel.STABLE: + case FirefoxChannel.ESR: + return `${baseUrl}/${resolvedBuildId}/${platformName(platform)}/en-US/${archive(platform, resolvedBuildId)}`; + } +} +exports.resolveDownloadUrl = resolveDownloadUrl; +function resolveDownloadPath(platform, buildId) { + return [archiveNightly(platform, buildId)]; +} +exports.resolveDownloadPath = resolveDownloadPath; +function relativeExecutablePath(platform, buildId) { + const [channel] = parseBuildId(buildId); + switch (channel) { + case FirefoxChannel.NIGHTLY: + switch (platform) { + case types_js_1.BrowserPlatform.MAC_ARM: + case
types_js_1.BrowserPlatform.MAC: + return path_1.default.join('Firefox Nightly.app', 'Contents', 'MacOS', 'firefox'); + case types_js_1.BrowserPlatform.LINUX: + return path_1.default.join('firefox', 'firefox'); + case types_js_1.BrowserPlatform.WIN32: + case types_js_1.BrowserPlatform.WIN64: + return path_1.default.join('firefox', 'firefox.exe'); + } + case FirefoxChannel.BETA: + case FirefoxChannel.DEVEDITION: + case FirefoxChannel.ESR: + case FirefoxChannel.STABLE: + switch (platform) { + case types_js_1.BrowserPlatform.MAC_ARM: + case types_js_1.BrowserPlatform.MAC: + return path_1.default.join('Firefox.app', 'Contents', 'MacOS', 'firefox'); + case types_js_1.BrowserPlatform.LINUX: + return path_1.default.join('firefox', 'firefox'); + case types_js_1.BrowserPlatform.WIN32: + case types_js_1.BrowserPlatform.WIN64: + return path_1.default.join('core', 'firefox.exe'); + } } } exports.relativeExecutablePath = relativeExecutablePath; -async function resolveBuildId(channel = 'FIREFOX_NIGHTLY') { +var FirefoxChannel; +(function (FirefoxChannel) { + FirefoxChannel["STABLE"] = "stable"; + FirefoxChannel["ESR"] = "esr"; + FirefoxChannel["DEVEDITION"] = "devedition"; + FirefoxChannel["BETA"] = "beta"; + FirefoxChannel["NIGHTLY"] = "nightly"; +})(FirefoxChannel || (exports.FirefoxChannel = FirefoxChannel = {})); +async function resolveBuildId(channel = FirefoxChannel.NIGHTLY) { + const channelToVersionKey = { + [FirefoxChannel.ESR]: 'FIREFOX_ESR', + [FirefoxChannel.STABLE]: 'LATEST_FIREFOX_VERSION', + [FirefoxChannel.DEVEDITION]: 'FIREFOX_DEVEDITION', + [FirefoxChannel.BETA]: 'FIREFOX_DEVEDITION', + [FirefoxChannel.NIGHTLY]: 'FIREFOX_NIGHTLY', + }; const versions = (await (0, httpUtil_js_1.getJSON)(new URL('https://product-details.mozilla.org/1.0/firefox_versions.json'))); - const version = versions[channel]; + const version = versions[channelToVersionKey[channel]]; if (!version) { throw new Error(`Channel ${channel} is not found.`); } - return version; + return channel + 
'_' + version; } exports.resolveBuildId = resolveBuildId; async function createProfile(options) { @@ -186,9 +276,6 @@ function defaultProfilePreferences(extraPrefs) { 'media.gmp-manager.updateEnabled': false, // Disable the GFX sanity window 'media.sanity-test.disabled': true, - // Prevent various error message on the console - // jest-puppeteer asserts that no error message is emitted by the console - 'network.cookie.cookieBehavior': 0, // Disable experimental feature that is only available in Nightly 'network.cookie.sameSite.laxByDefault': false, // Do not prompt for temporary redirects @@ -243,15 +330,29 @@ function defaultProfilePreferences(extraPrefs) { * @param profilePath - Firefox profile to write the preferences to. */ async function writePreferences(options) { + const prefsPath = path_1.default.join(options.path, 'prefs.js'); const lines = Object.entries(options.preferences).map(([key, value]) => { return `user_pref(${JSON.stringify(key)}, ${JSON.stringify(value)});`; }); - await fs_1.default.promises.writeFile(path_1.default.join(options.path, 'user.js'), lines.join('\n')); - // Create a backup of the preferences file if it already exitsts. - const prefsPath = path_1.default.join(options.path, 'prefs.js'); - if (fs_1.default.existsSync(prefsPath)) { - const prefsBackupPath = path_1.default.join(options.path, 'prefs.js.puppeteer'); - await fs_1.default.promises.copyFile(prefsPath, prefsBackupPath); + // Use allSettled to prevent corruption + const result = await Promise.allSettled([ + fs_1.default.promises.writeFile(path_1.default.join(options.path, 'user.js'), lines.join('\n')), + // Create a backup of the preferences file if it already exitsts. 
+ fs_1.default.promises.access(prefsPath, fs_1.default.constants.F_OK).then(async () => { + await fs_1.default.promises.copyFile(prefsPath, path_1.default.join(options.path, 'prefs.js.puppeteer')); + }, + // Swallow only if file does not exist + () => { }), + ]); + for (const command of result) { + if (command.status === 'rejected') { + throw command.reason; + } } } +function compareVersions(a, b) { + // TODO: this is a not very reliable check. + return parseInt(a.replace('.', ''), 16) - parseInt(b.replace('.', ''), 16); +} +exports.compareVersions = compareVersions; //# sourceMappingURL=firefox.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/types.js b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/types.js index c6619a9f5..f99752d6f 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/browser-data/types.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/browser-data/types.js @@ -39,9 +39,12 @@ var BrowserPlatform; var BrowserTag; (function (BrowserTag) { BrowserTag["CANARY"] = "canary"; + BrowserTag["NIGHTLY"] = "nightly"; BrowserTag["BETA"] = "beta"; BrowserTag["DEV"] = "dev"; + BrowserTag["DEVEDITION"] = "devedition"; BrowserTag["STABLE"] = "stable"; + BrowserTag["ESR"] = "esr"; BrowserTag["LATEST"] = "latest"; })(BrowserTag || (exports.BrowserTag = BrowserTag = {})); /** diff --git a/node_modules/@puppeteer/browsers/lib/cjs/fileUtil.js b/node_modules/@puppeteer/browsers/lib/cjs/fileUtil.js index 725f8e599..82ca7f0b3 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/fileUtil.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/fileUtil.js @@ -55,6 +55,17 @@ async function unpackArchive(archivePath, folderPath) { await (0, promises_1.mkdir)(folderPath); await installDMG(archivePath, folderPath); } + else if (archivePath.endsWith('.exe')) { + // Firefox on Windows. 
+ const result = (0, child_process_1.spawnSync)(archivePath, [`/ExtractDir=${folderPath}`], { + env: { + __compat_layer: 'RunAsInvoker', + }, + }); + if (result.status !== 0) { + throw new Error(`Failed to extract ${archivePath} to ${folderPath}: ${result.output}`); + } + } else { throw new Error(`Unsupported archive format: ${archivePath}`); } diff --git a/node_modules/@puppeteer/browsers/lib/cjs/httpUtil.js b/node_modules/@puppeteer/browsers/lib/cjs/httpUtil.js index baf50fc7e..94717a121 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/httpUtil.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/httpUtil.js @@ -64,6 +64,9 @@ function httpRequest(url, method, response, keepAlive = true) { res.statusCode < 400 && res.headers.location) { httpRequest(new url_1.URL(res.headers.location), method, response); + // consume response data to free up memory + // And prevents the connection from being kept alive + res.resume(); } else { response(res); diff --git a/node_modules/@puppeteer/browsers/lib/cjs/install.js b/node_modules/@puppeteer/browsers/lib/cjs/install.js index 26881150b..2c3449001 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/install.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/install.js @@ -41,6 +41,59 @@ async function install(options) { throw new Error(`Cannot download a binary for the provided platform: ${os_1.default.platform()} (${os_1.default.arch()})`); } const url = getDownloadUrl(options.browser, options.platform, options.buildId, options.baseUrl); + try { + return await installUrl(url, options); + } + catch (err) { + // If custom baseUrl is provided, do not fall back to CfT dashboard. 
+ if (options.baseUrl && !options.forceFallbackForTesting) { + throw err; + } + debugInstall(`Error downloading from ${url}.`); + switch (options.browser) { + case browser_data_js_1.Browser.CHROME: + case browser_data_js_1.Browser.CHROMEDRIVER: + case browser_data_js_1.Browser.CHROMEHEADLESSSHELL: { + debugInstall(`Trying to find download URL via https://googlechromelabs.github.io/chrome-for-testing.`); + const version = (await (0, httpUtil_js_1.getJSON)(new URL(`https://googlechromelabs.github.io/chrome-for-testing/${options.buildId}.json`))); + let platform = ''; + switch (options.platform) { + case browser_data_js_1.BrowserPlatform.LINUX: + platform = 'linux64'; + break; + case browser_data_js_1.BrowserPlatform.MAC_ARM: + platform = 'mac-arm64'; + break; + case browser_data_js_1.BrowserPlatform.MAC: + platform = 'mac-x64'; + break; + case browser_data_js_1.BrowserPlatform.WIN32: + platform = 'win32'; + break; + case browser_data_js_1.BrowserPlatform.WIN64: + platform = 'win64'; + break; + } + const url = version.downloads[options.browser]?.find(link => { + return link['platform'] === platform; + })?.url; + if (url) { + debugInstall(`Falling back to downloading from ${url}.`); + return await installUrl(new URL(url), options); + } + throw err; + } + default: + throw err; + } + } +} +exports.install = install; +async function installUrl(url, options) { + options.platform ??= (0, detectPlatform_js_1.detectBrowserPlatform)(); + if (!options.platform) { + throw new Error(`Cannot download a binary for the provided platform: ${os_1.default.platform()} (${os_1.default.arch()})`); + } const fileName = url.toString().split('/').pop(); (0, assert_1.default)(fileName, `A malformed download URL was found: ${url}.`); const cache = new Cache_js_1.Cache(options.cacheDir); @@ -60,10 +113,14 @@ async function install(options) { return archivePath; } const outputPath = cache.installationDir(options.browser, options.platform, options.buildId); - if ((0, fs_1.existsSync)(outputPath)) 
{ - return new Cache_js_1.InstalledBrowser(cache, options.browser, options.buildId, options.platform); - } try { + if ((0, fs_1.existsSync)(outputPath)) { + const installedBrowser = new Cache_js_1.InstalledBrowser(cache, options.browser, options.buildId, options.platform); + if (!(0, fs_1.existsSync)(installedBrowser.executablePath)) { + throw new Error(`The browser folder (${outputPath}) exists but the executable (${installedBrowser.executablePath}) is missing`); + } + return installedBrowser; + } debugInstall(`Downloading binary from ${url}`); try { debugTime('download'); @@ -80,15 +137,20 @@ async function install(options) { finally { debugTimeEnd('extract'); } + const installedBrowser = new Cache_js_1.InstalledBrowser(cache, options.browser, options.buildId, options.platform); + if (options.buildIdAlias) { + const metadata = installedBrowser.readMetadata(); + metadata.aliases[options.buildIdAlias] = options.buildId; + installedBrowser.writeMetadata(metadata); + } + return installedBrowser; } finally { if ((0, fs_1.existsSync)(archivePath)) { await (0, promises_1.unlink)(archivePath); } } - return new Cache_js_1.InstalledBrowser(cache, options.browser, options.buildId, options.platform); } -exports.install = install; /** * * @public diff --git a/node_modules/@puppeteer/browsers/lib/cjs/launch.js b/node_modules/@puppeteer/browsers/lib/cjs/launch.js index 45cbaf064..3bb92985f 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/launch.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/launch.js @@ -58,6 +58,49 @@ exports.CDP_WEBSOCKET_ENDPOINT_REGEX = /^DevTools listening on (ws:\/\/.*)$/; * @public */ exports.WEBDRIVER_BIDI_WEBSOCKET_ENDPOINT_REGEX = /^WebDriver BiDi listening on (ws:\/\/.*)$/; +const processListeners = new Map(); +const dispatchers = { + exit: (...args) => { + processListeners.get('exit')?.forEach(handler => { + return handler(...args); + }); + }, + SIGINT: (...args) => { + processListeners.get('SIGINT')?.forEach(handler => { + return 
handler(...args); + }); + }, + SIGHUP: (...args) => { + processListeners.get('SIGHUP')?.forEach(handler => { + return handler(...args); + }); + }, + SIGTERM: (...args) => { + processListeners.get('SIGTERM')?.forEach(handler => { + return handler(...args); + }); + }, +}; +function subscribeToProcessEvent(event, handler) { + const listeners = processListeners.get(event) || []; + if (listeners.length === 0) { + process.on(event, dispatchers[event]); + } + listeners.push(handler); + processListeners.set(event, listeners); +} +function unsubscribeFromProcessEvent(event, handler) { + const listeners = processListeners.get(event) || []; + const existingListenerIdx = listeners.indexOf(handler); + if (existingListenerIdx === -1) { + return; + } + listeners.splice(existingListenerIdx, 1); + processListeners.set(event, listeners); + if (listeners.length === 0) { + process.off(event, dispatchers[event]); + } +} /** * @public */ @@ -110,15 +153,15 @@ class Process { this.#browserProcess.stderr?.pipe(process.stderr); this.#browserProcess.stdout?.pipe(process.stdout); } - process.on('exit', this.#onDriverProcessExit); + subscribeToProcessEvent('exit', this.#onDriverProcessExit); if (opts.handleSIGINT) { - process.on('SIGINT', this.#onDriverProcessSignal); + subscribeToProcessEvent('SIGINT', this.#onDriverProcessSignal); } if (opts.handleSIGTERM) { - process.on('SIGTERM', this.#onDriverProcessSignal); + subscribeToProcessEvent('SIGTERM', this.#onDriverProcessSignal); } if (opts.handleSIGHUP) { - process.on('SIGHUP', this.#onDriverProcessSignal); + subscribeToProcessEvent('SIGHUP', this.#onDriverProcessSignal); } if (opts.onExit) { this.#onExitHook = opts.onExit; @@ -168,10 +211,10 @@ class Process { } } #clearListeners() { - process.off('exit', this.#onDriverProcessExit); - process.off('SIGINT', this.#onDriverProcessSignal); - process.off('SIGTERM', this.#onDriverProcessSignal); - process.off('SIGHUP', this.#onDriverProcessSignal); + unsubscribeFromProcessEvent('exit', 
this.#onDriverProcessExit); + unsubscribeFromProcessEvent('SIGINT', this.#onDriverProcessSignal); + unsubscribeFromProcessEvent('SIGTERM', this.#onDriverProcessSignal); + unsubscribeFromProcessEvent('SIGHUP', this.#onDriverProcessSignal); } #onDriverProcessExit = (_code) => { this.kill(); diff --git a/node_modules/@puppeteer/browsers/lib/cjs/main.js b/node_modules/@puppeteer/browsers/lib/cjs/main.js index c0453ed96..b4cdbb1f2 100644 --- a/node_modules/@puppeteer/browsers/lib/cjs/main.js +++ b/node_modules/@puppeteer/browsers/lib/cjs/main.js @@ -5,7 +5,7 @@ * SPDX-License-Identifier: Apache-2.0 */ Object.defineProperty(exports, "__esModule", { value: true }); -exports.InstalledBrowser = exports.Cache = exports.makeProgressCallback = exports.CLI = exports.createProfile = exports.ChromeReleaseChannel = exports.BrowserPlatform = exports.Browser = exports.resolveBuildId = exports.detectBrowserPlatform = exports.uninstall = exports.canDownload = exports.getInstalledBrowsers = exports.install = exports.Process = exports.WEBDRIVER_BIDI_WEBSOCKET_ENDPOINT_REGEX = exports.CDP_WEBSOCKET_ENDPOINT_REGEX = exports.TimeoutError = exports.computeSystemExecutablePath = exports.computeExecutablePath = exports.launch = void 0; +exports.InstalledBrowser = exports.Cache = exports.makeProgressCallback = exports.CLI = exports.getVersionComparator = exports.createProfile = exports.ChromeReleaseChannel = exports.BrowserPlatform = exports.Browser = exports.resolveBuildId = exports.detectBrowserPlatform = exports.uninstall = exports.canDownload = exports.getInstalledBrowsers = exports.install = exports.Process = exports.WEBDRIVER_BIDI_WEBSOCKET_ENDPOINT_REGEX = exports.CDP_WEBSOCKET_ENDPOINT_REGEX = exports.TimeoutError = exports.computeSystemExecutablePath = exports.computeExecutablePath = exports.launch = void 0; var launch_js_1 = require("./launch.js"); Object.defineProperty(exports, "launch", { enumerable: true, get: function () { return launch_js_1.launch; } }); 
Object.defineProperty(exports, "computeExecutablePath", { enumerable: true, get: function () { return launch_js_1.computeExecutablePath; } }); @@ -27,6 +27,7 @@ Object.defineProperty(exports, "Browser", { enumerable: true, get: function () { Object.defineProperty(exports, "BrowserPlatform", { enumerable: true, get: function () { return browser_data_js_1.BrowserPlatform; } }); Object.defineProperty(exports, "ChromeReleaseChannel", { enumerable: true, get: function () { return browser_data_js_1.ChromeReleaseChannel; } }); Object.defineProperty(exports, "createProfile", { enumerable: true, get: function () { return browser_data_js_1.createProfile; } }); +Object.defineProperty(exports, "getVersionComparator", { enumerable: true, get: function () { return browser_data_js_1.getVersionComparator; } }); var CLI_js_1 = require("./CLI.js"); Object.defineProperty(exports, "CLI", { enumerable: true, get: function () { return CLI_js_1.CLI; } }); Object.defineProperty(exports, "makeProgressCallback", { enumerable: true, get: function () { return CLI_js_1.makeProgressCallback; } }); diff --git a/node_modules/@puppeteer/browsers/lib/esm/CLI.js b/node_modules/@puppeteer/browsers/lib/esm/CLI.js index fc2a72548..86aa31283 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/CLI.js +++ b/node_modules/@puppeteer/browsers/lib/esm/CLI.js @@ -103,6 +103,9 @@ export class CLI { }); yargs.example('$0 install chrome', `Install the ${latestOrPinned} available build of the Chrome browser.`); yargs.example('$0 install chrome@latest', 'Install the latest available build for the Chrome browser.'); + yargs.example('$0 install chrome@stable', 'Install the latest available build for the Chrome browser from the stable channel.'); + yargs.example('$0 install chrome@beta', 'Install the latest available build for the Chrome browser from the beta channel.'); + yargs.example('$0 install chrome@dev', 'Install the latest available build for the Chrome browser from the dev channel.'); yargs.example('$0 
install chrome@canary', 'Install the latest available build for the Chrome Canary browser.'); yargs.example('$0 install chrome@115', 'Install the latest available build for Chrome 115.'); yargs.example('$0 install chromedriver@canary', 'Install the latest available build for ChromeDriver Canary.'); @@ -112,7 +115,13 @@ export class CLI { yargs.example('$0 install chrome-headless-shell@beta', 'Install the latest available chrome-headless-shell build corresponding to the Beta channel.'); yargs.example('$0 install chrome-headless-shell@118', 'Install the latest available chrome-headless-shell 118 build.'); yargs.example('$0 install chromium@1083080', 'Install the revision 1083080 of the Chromium browser.'); - yargs.example('$0 install firefox', 'Install the latest available build of the Firefox browser.'); + yargs.example('$0 install firefox', 'Install the latest nightly available build of the Firefox browser.'); + yargs.example('$0 install firefox@stable', 'Install the latest stable build of the Firefox browser.'); + yargs.example('$0 install firefox@beta', 'Install the latest beta build of the Firefox browser.'); + yargs.example('$0 install firefox@devedition', 'Install the latest devedition build of the Firefox browser.'); + yargs.example('$0 install firefox@esr', 'Install the latest ESR build of the Firefox browser.'); + yargs.example('$0 install firefox@nightly', 'Install the latest nightly build of the Firefox browser.'); + yargs.example('$0 install firefox@stable_111.0.1', 'Install a specific version of the Firefox browser.'); yargs.example('$0 install firefox --platform mac', 'Install the latest Mac (Intel) build of the Firefox browser.'); if (this.#allowCachePathOverride) { yargs.example('$0 install firefox --path /tmp/my-browser-cache', 'Install to the specified cache directory.'); @@ -130,6 +139,7 @@ export class CLI { } args.browser.buildId = pinnedVersion; } + const originalBuildId = args.browser.buildId; args.browser.buildId = await 
resolveBuildId(args.browser.name, args.platform, args.browser.buildId); await install({ browser: args.browser.name, @@ -138,6 +148,9 @@ export class CLI { cacheDir: args.path ?? this.#cachePath, downloadProgressCallback: makeProgressCallback(args.browser.name, args.browser.buildId), baseUrl: args.baseUrl, + buildIdAlias: originalBuildId !== args.browser.buildId + ? originalBuildId + : undefined, }); console.log(`${args.browser.name}@${args.browser.buildId} ${computeExecutablePath({ browser: args.browser.name, @@ -226,7 +239,7 @@ export function makeProgressCallback(browser, buildId) { let lastDownloadedBytes = 0; return (downloadedBytes, totalBytes) => { if (!progressBar) { - progressBar = new ProgressBar(`Downloading ${browser} r${buildId} - ${toMegabytes(totalBytes)} [:bar] :percent :etas `, { + progressBar = new ProgressBar(`Downloading ${browser} ${buildId} - ${toMegabytes(totalBytes)} [:bar] :percent :etas `, { complete: '=', incomplete: ' ', width: 20, diff --git a/node_modules/@puppeteer/browsers/lib/esm/Cache.js b/node_modules/@puppeteer/browsers/lib/esm/Cache.js index 0af8720d0..bacf33081 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/Cache.js +++ b/node_modules/@puppeteer/browsers/lib/esm/Cache.js @@ -6,8 +6,10 @@ import fs from 'fs'; import os from 'os'; import path from 'path'; -import { Browser, executablePathByBrowser, } from './browser-data/browser-data.js'; +import debug from 'debug'; +import { Browser, executablePathByBrowser, getVersionComparator, } from './browser-data/browser-data.js'; import { detectBrowserPlatform } from './detectPlatform.js'; +const debugCache = debug('puppeteer:browsers:cache'); /** * @public */ @@ -38,6 +40,12 @@ export class InstalledBrowser { get path() { return this.#cache.installationDir(this.browser, this.platform, this.buildId); } + readMetadata() { + return this.#cache.readMetadata(this.browser); + } + writeMetadata(metadata) { + this.#cache.writeMetadata(this.browser, metadata); + } } /** * The cache used by 
Puppeteer relies on the following structure: @@ -67,6 +75,35 @@ export class Cache { browserRoot(browser) { return path.join(this.#rootDir, browser); } + metadataFile(browser) { + return path.join(this.browserRoot(browser), '.metadata'); + } + readMetadata(browser) { + const metatadaPath = this.metadataFile(browser); + if (!fs.existsSync(metatadaPath)) { + return { aliases: {} }; + } + // TODO: add type-safe parsing. + const data = JSON.parse(fs.readFileSync(metatadaPath, 'utf8')); + if (typeof data !== 'object') { + throw new Error('.metadata is not an object'); + } + return data; + } + writeMetadata(browser, metadata) { + const metatadaPath = this.metadataFile(browser); + fs.mkdirSync(path.dirname(metatadaPath), { recursive: true }); + fs.writeFileSync(metatadaPath, JSON.stringify(metadata, null, 2)); + } + resolveAlias(browser, alias) { + const metadata = this.readMetadata(browser); + if (alias === 'latest') { + return Object.values(metadata.aliases || {}) + .sort(getVersionComparator(browser)) + .at(-1); + } + return metadata.aliases[alias]; + } installationDir(browser, platform, buildId) { return path.join(this.browserRoot(browser), `${platform}-${buildId}`); } @@ -79,6 +116,12 @@ export class Cache { }); } uninstall(browser, platform, buildId) { + const metadata = this.readMetadata(browser); + for (const alias of Object.keys(metadata.aliases)) { + if (metadata.aliases[alias] === buildId) { + delete metadata.aliases[alias]; + } + } fs.rmSync(this.installationDir(browser, platform, buildId), { force: true, recursive: true, @@ -114,6 +157,13 @@ export class Cache { if (!options.platform) { throw new Error(`Cannot download a binary for the provided platform: ${os.platform()} (${os.arch()})`); } + try { + options.buildId = + this.resolveAlias(options.browser, options.buildId) ?? 
options.buildId; + } + catch { + debugCache('could not read .metadata file for the browser'); + } const installationDir = this.installationDir(options.browser, options.platform, options.buildId); return path.join(installationDir, executablePathByBrowser[options.browser](options.platform, options.buildId)); } diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/browser-data.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/browser-data.js index 1afc9c64b..eb9ed1637 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/browser-data.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/browser-data.js @@ -30,21 +30,36 @@ export const executablePathByBrowser = { [Browser.CHROMIUM]: chromium.relativeExecutablePath, [Browser.FIREFOX]: firefox.relativeExecutablePath, }; +export const versionComparators = { + [Browser.CHROMEDRIVER]: chromedriver.compareVersions, + [Browser.CHROMEHEADLESSSHELL]: chromeHeadlessShell.compareVersions, + [Browser.CHROME]: chrome.compareVersions, + [Browser.CHROMIUM]: chromium.compareVersions, + [Browser.FIREFOX]: firefox.compareVersions, +}; export { Browser, BrowserPlatform, ChromeReleaseChannel }; /** - * @public + * @internal */ -export async function resolveBuildId(browser, platform, tag) { +async function resolveBuildIdForBrowserTag(browser, platform, tag) { switch (browser) { case Browser.FIREFOX: switch (tag) { case BrowserTag.LATEST: - return await firefox.resolveBuildId('FIREFOX_NIGHTLY'); + return await firefox.resolveBuildId(firefox.FirefoxChannel.NIGHTLY); case BrowserTag.BETA: + return await firefox.resolveBuildId(firefox.FirefoxChannel.BETA); + case BrowserTag.NIGHTLY: + return await firefox.resolveBuildId(firefox.FirefoxChannel.NIGHTLY); + case BrowserTag.DEVEDITION: + return await firefox.resolveBuildId(firefox.FirefoxChannel.DEVEDITION); + case BrowserTag.STABLE: + return await firefox.resolveBuildId(firefox.FirefoxChannel.STABLE); + case BrowserTag.ESR: + return await 
firefox.resolveBuildId(firefox.FirefoxChannel.ESR); case BrowserTag.CANARY: case BrowserTag.DEV: - case BrowserTag.STABLE: - throw new Error(`${tag} is not supported for ${browser}. Use 'latest' instead.`); + throw new Error(`${tag.toUpperCase()} is not available for Firefox`); } case Browser.CHROME: { switch (tag) { @@ -58,13 +73,11 @@ export async function resolveBuildId(browser, platform, tag) { return await chrome.resolveBuildId(ChromeReleaseChannel.DEV); case BrowserTag.STABLE: return await chrome.resolveBuildId(ChromeReleaseChannel.STABLE); - default: - const result = await chrome.resolveBuildId(tag); - if (result) { - return result; - } + case BrowserTag.NIGHTLY: + case BrowserTag.DEVEDITION: + case BrowserTag.ESR: + throw new Error(`${tag.toUpperCase()} is not available for Chrome`); } - return tag; } case Browser.CHROMEDRIVER: { switch (tag) { @@ -77,13 +90,11 @@ export async function resolveBuildId(browser, platform, tag) { return await chromedriver.resolveBuildId(ChromeReleaseChannel.DEV); case BrowserTag.STABLE: return await chromedriver.resolveBuildId(ChromeReleaseChannel.STABLE); - default: - const result = await chromedriver.resolveBuildId(tag); - if (result) { - return result; - } + case BrowserTag.NIGHTLY: + case BrowserTag.DEVEDITION: + case BrowserTag.ESR: + throw new Error(`${tag.toUpperCase()} is not available for ChromeDriver`); } - return tag; } case Browser.CHROMEHEADLESSSHELL: { switch (tag) { @@ -96,27 +107,59 @@ export async function resolveBuildId(browser, platform, tag) { return await chromeHeadlessShell.resolveBuildId(ChromeReleaseChannel.DEV); case BrowserTag.STABLE: return await chromeHeadlessShell.resolveBuildId(ChromeReleaseChannel.STABLE); - default: - const result = await chromeHeadlessShell.resolveBuildId(tag); - if (result) { - return result; - } + case BrowserTag.NIGHTLY: + case BrowserTag.DEVEDITION: + case BrowserTag.ESR: + throw new Error(`${tag} is not available for chrome-headless-shell`); } - return tag; } case 
Browser.CHROMIUM: switch (tag) { case BrowserTag.LATEST: return await chromium.resolveBuildId(platform); - case BrowserTag.BETA: + case BrowserTag.NIGHTLY: case BrowserTag.CANARY: case BrowserTag.DEV: + case BrowserTag.DEVEDITION: + case BrowserTag.BETA: case BrowserTag.STABLE: - throw new Error(`${tag} is not supported for ${browser}. Use 'latest' instead.`); + case BrowserTag.ESR: + throw new Error(`${tag} is not supported for Chromium. Use 'latest' instead.`); } } - // We assume the tag is the buildId if it didn't match any keywords. - return tag; +} +/** + * @public + */ +export async function resolveBuildId(browser, platform, tag) { + const browserTag = tag; + if (Object.values(BrowserTag).includes(browserTag)) { + return await resolveBuildIdForBrowserTag(browser, platform, browserTag); + } + switch (browser) { + case Browser.FIREFOX: + return tag; + case Browser.CHROME: + const chromeResult = await chrome.resolveBuildId(tag); + if (chromeResult) { + return chromeResult; + } + return tag; + case Browser.CHROMEDRIVER: + const chromeDriverResult = await chromedriver.resolveBuildId(tag); + if (chromeDriverResult) { + return chromeDriverResult; + } + return tag; + case Browser.CHROMEHEADLESSSHELL: + const chromeHeadlessShellResult = await chromeHeadlessShell.resolveBuildId(tag); + if (chromeHeadlessShellResult) { + return chromeHeadlessShellResult; + } + return tag; + case Browser.CHROMIUM: + return tag; + } } /** * @public @@ -144,4 +187,13 @@ export function resolveSystemExecutablePath(browser, platform, channel) { return chrome.resolveSystemExecutablePath(platform, channel); } } +/** + * Returns a version comparator for the given browser that can be used to sort + * browser versions. 
+ * + * @public + */ +export function getVersionComparator(browser) { + return versionComparators[browser]; +} //# sourceMappingURL=browser-data.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome-headless-shell.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome-headless-shell.js index 870b234f3..4c31584aa 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome-headless-shell.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome-headless-shell.js @@ -19,7 +19,7 @@ function folder(platform) { return 'win64'; } } -export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing') { +export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://storage.googleapis.com/chrome-for-testing-public') { return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; } export function resolveDownloadPath(platform, buildId) { @@ -41,5 +41,5 @@ export function relativeExecutablePath(platform, _buildId) { return path.join('chrome-headless-shell-' + folder(platform), 'chrome-headless-shell.exe'); } } -export { resolveBuildId } from './chrome.js'; +export { resolveBuildId, compareVersions } from './chrome.js'; //# sourceMappingURL=chrome-headless-shell.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome.js index 9de4c0d4c..12ef15fd2 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chrome.js @@ -4,6 +4,7 @@ * SPDX-License-Identifier: Apache-2.0 */ import path from 'path'; +import semver from 'semver'; import { getJSON } from '../httpUtil.js'; import { BrowserPlatform, ChromeReleaseChannel } from './types.js'; function folder(platform) { @@ -20,7 +21,7 @@ function folder(platform) { return 
'win64'; } } -export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing') { +export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://storage.googleapis.com/chrome-for-testing-public') { return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; } export function resolveDownloadPath(platform, buildId) { @@ -110,4 +111,21 @@ export function resolveSystemExecutablePath(platform, channel) { } throw new Error(`Unable to detect browser executable path for '${channel}' on ${platform}.`); } +export function compareVersions(a, b) { + if (!semver.valid(a)) { + throw new Error(`Version ${a} is not a valid semver version`); + } + if (!semver.valid(b)) { + throw new Error(`Version ${b} is not a valid semver version`); + } + if (semver.gt(a, b)) { + return 1; + } + else if (semver.lt(a, b)) { + return -1; + } + else { + return 0; + } +} //# sourceMappingURL=chrome.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromedriver.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromedriver.js index d98891bdc..3e31d2f40 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromedriver.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromedriver.js @@ -19,7 +19,7 @@ function folder(platform) { return 'win64'; } } -export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://edgedl.me.gvt1.com/edgedl/chrome/chrome-for-testing') { +export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://storage.googleapis.com/chrome-for-testing-public') { return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; } export function resolveDownloadPath(platform, buildId) { @@ -37,5 +37,5 @@ export function relativeExecutablePath(platform, _buildId) { return path.join('chromedriver-' + folder(platform), 'chromedriver.exe'); } } -export { resolveBuildId } from './chrome.js'; 
+export { resolveBuildId, compareVersions } from './chrome.js'; //# sourceMappingURL=chromedriver.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromium.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromium.js index a6627b9b7..0e0228ae0 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromium.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/chromium.js @@ -54,4 +54,7 @@ export function relativeExecutablePath(platform, _buildId) { export async function resolveBuildId(platform) { return await getText(new URL(`https://storage.googleapis.com/chromium-browser-snapshots/${folder(platform)}/LAST_CHANGE`)); } +export function compareVersions(a, b) { + return Number(a) - Number(b); +} //# sourceMappingURL=chromium.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/firefox.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/firefox.js index 9e0d5e3c8..580d581e1 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/firefox.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/firefox.js @@ -7,7 +7,7 @@ import fs from 'fs'; import path from 'path'; import { getJSON } from '../httpUtil.js'; import { BrowserPlatform } from './types.js'; -function archive(platform, buildId) { +function archiveNightly(platform, buildId) { switch (platform) { case BrowserPlatform.LINUX: return `firefox-${buildId}.en-US.${platform}-x86_64.tar.bz2`; @@ -19,31 +19,121 @@ function archive(platform, buildId) { return `firefox-${buildId}.en-US.${platform}.zip`; } } -export function resolveDownloadUrl(platform, buildId, baseUrl = 'https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central') { - return `${baseUrl}/${resolveDownloadPath(platform, buildId).join('/')}`; -} -export function resolveDownloadPath(platform, buildId) { - return [archive(platform, buildId)]; -} -export function 
relativeExecutablePath(platform, _buildId) { +function archive(platform, buildId) { switch (platform) { + case BrowserPlatform.LINUX: + return `firefox-${buildId}.tar.bz2`; case BrowserPlatform.MAC_ARM: case BrowserPlatform.MAC: - return path.join('Firefox Nightly.app', 'Contents', 'MacOS', 'firefox'); + return `Firefox ${buildId}.dmg`; + case BrowserPlatform.WIN32: + case BrowserPlatform.WIN64: + return `Firefox Setup ${buildId}.exe`; + } +} +function platformName(platform) { + switch (platform) { case BrowserPlatform.LINUX: - return path.join('firefox', 'firefox'); + return `linux-x86_64`; + case BrowserPlatform.MAC_ARM: + case BrowserPlatform.MAC: + return `mac`; case BrowserPlatform.WIN32: case BrowserPlatform.WIN64: - return path.join('firefox', 'firefox.exe'); + return platform; + } +} +function parseBuildId(buildId) { + for (const value of Object.values(FirefoxChannel)) { + if (buildId.startsWith(value + '_')) { + buildId = buildId.substring(value.length + 1); + return [value, buildId]; + } + } + // Older versions do not have channel as the prefix.« + return [FirefoxChannel.NIGHTLY, buildId]; +} +export function resolveDownloadUrl(platform, buildId, baseUrl) { + const [channel, resolvedBuildId] = parseBuildId(buildId); + switch (channel) { + case FirefoxChannel.NIGHTLY: + baseUrl ??= + 'https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central'; + break; + case FirefoxChannel.DEVEDITION: + baseUrl ??= 'https://archive.mozilla.org/pub/devedition/releases'; + break; + case FirefoxChannel.BETA: + case FirefoxChannel.STABLE: + case FirefoxChannel.ESR: + baseUrl ??= 'https://archive.mozilla.org/pub/firefox/releases'; + break; + } + switch (channel) { + case FirefoxChannel.NIGHTLY: + return `${baseUrl}/${resolveDownloadPath(platform, resolvedBuildId).join('/')}`; + case FirefoxChannel.DEVEDITION: + case FirefoxChannel.BETA: + case FirefoxChannel.STABLE: + case FirefoxChannel.ESR: + return 
`${baseUrl}/${resolvedBuildId}/${platformName(platform)}/en-US/${archive(platform, resolvedBuildId)}`; + } +} +export function resolveDownloadPath(platform, buildId) { + return [archiveNightly(platform, buildId)]; +} +export function relativeExecutablePath(platform, buildId) { + const [channel] = parseBuildId(buildId); + switch (channel) { + case FirefoxChannel.NIGHTLY: + switch (platform) { + case BrowserPlatform.MAC_ARM: + case BrowserPlatform.MAC: + return path.join('Firefox Nightly.app', 'Contents', 'MacOS', 'firefox'); + case BrowserPlatform.LINUX: + return path.join('firefox', 'firefox'); + case BrowserPlatform.WIN32: + case BrowserPlatform.WIN64: + return path.join('firefox', 'firefox.exe'); + } + case FirefoxChannel.BETA: + case FirefoxChannel.DEVEDITION: + case FirefoxChannel.ESR: + case FirefoxChannel.STABLE: + switch (platform) { + case BrowserPlatform.MAC_ARM: + case BrowserPlatform.MAC: + return path.join('Firefox.app', 'Contents', 'MacOS', 'firefox'); + case BrowserPlatform.LINUX: + return path.join('firefox', 'firefox'); + case BrowserPlatform.WIN32: + case BrowserPlatform.WIN64: + return path.join('core', 'firefox.exe'); + } } } -export async function resolveBuildId(channel = 'FIREFOX_NIGHTLY') { +export var FirefoxChannel; +(function (FirefoxChannel) { + FirefoxChannel["STABLE"] = "stable"; + FirefoxChannel["ESR"] = "esr"; + FirefoxChannel["DEVEDITION"] = "devedition"; + FirefoxChannel["BETA"] = "beta"; + FirefoxChannel["NIGHTLY"] = "nightly"; +})(FirefoxChannel || (FirefoxChannel = {})); +export async function resolveBuildId(channel = FirefoxChannel.NIGHTLY) { + const channelToVersionKey = { + [FirefoxChannel.ESR]: 'FIREFOX_ESR', + [FirefoxChannel.STABLE]: 'LATEST_FIREFOX_VERSION', + [FirefoxChannel.DEVEDITION]: 'FIREFOX_DEVEDITION', + [FirefoxChannel.BETA]: 'FIREFOX_DEVEDITION', + [FirefoxChannel.NIGHTLY]: 'FIREFOX_NIGHTLY', + }; const versions = (await getJSON(new URL('https://product-details.mozilla.org/1.0/firefox_versions.json'))); - const 
version = versions[channel]; + const version = versions[channelToVersionKey[channel]]; if (!version) { throw new Error(`Channel ${channel} is not found.`); } - return version; + return channel + '_' + version; } export async function createProfile(options) { if (!fs.existsSync(options.path)) { @@ -175,9 +265,6 @@ function defaultProfilePreferences(extraPrefs) { 'media.gmp-manager.updateEnabled': false, // Disable the GFX sanity window 'media.sanity-test.disabled': true, - // Prevent various error message on the console - // jest-puppeteer asserts that no error message is emitted by the console - 'network.cookie.cookieBehavior': 0, // Disable experimental feature that is only available in Nightly 'network.cookie.sameSite.laxByDefault': false, // Do not prompt for temporary redirects @@ -232,15 +319,28 @@ function defaultProfilePreferences(extraPrefs) { * @param profilePath - Firefox profile to write the preferences to. */ async function writePreferences(options) { + const prefsPath = path.join(options.path, 'prefs.js'); const lines = Object.entries(options.preferences).map(([key, value]) => { return `user_pref(${JSON.stringify(key)}, ${JSON.stringify(value)});`; }); - await fs.promises.writeFile(path.join(options.path, 'user.js'), lines.join('\n')); - // Create a backup of the preferences file if it already exitsts. - const prefsPath = path.join(options.path, 'prefs.js'); - if (fs.existsSync(prefsPath)) { - const prefsBackupPath = path.join(options.path, 'prefs.js.puppeteer'); - await fs.promises.copyFile(prefsPath, prefsBackupPath); + // Use allSettled to prevent corruption + const result = await Promise.allSettled([ + fs.promises.writeFile(path.join(options.path, 'user.js'), lines.join('\n')), + // Create a backup of the preferences file if it already exitsts. 
+ fs.promises.access(prefsPath, fs.constants.F_OK).then(async () => { + await fs.promises.copyFile(prefsPath, path.join(options.path, 'prefs.js.puppeteer')); + }, + // Swallow only if file does not exist + () => { }), + ]); + for (const command of result) { + if (command.status === 'rejected') { + throw command.reason; + } } } +export function compareVersions(a, b) { + // TODO: this is a not very reliable check. + return parseInt(a.replace('.', ''), 16) - parseInt(b.replace('.', ''), 16); +} //# sourceMappingURL=firefox.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/lib/esm/browser-data/types.js b/node_modules/@puppeteer/browsers/lib/esm/browser-data/types.js index 9fab5337b..55219e5fd 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/browser-data/types.js +++ b/node_modules/@puppeteer/browsers/lib/esm/browser-data/types.js @@ -36,9 +36,12 @@ export var BrowserPlatform; export var BrowserTag; (function (BrowserTag) { BrowserTag["CANARY"] = "canary"; + BrowserTag["NIGHTLY"] = "nightly"; BrowserTag["BETA"] = "beta"; BrowserTag["DEV"] = "dev"; + BrowserTag["DEVEDITION"] = "devedition"; BrowserTag["STABLE"] = "stable"; + BrowserTag["ESR"] = "esr"; BrowserTag["LATEST"] = "latest"; })(BrowserTag || (BrowserTag = {})); /** diff --git a/node_modules/@puppeteer/browsers/lib/esm/fileUtil.js b/node_modules/@puppeteer/browsers/lib/esm/fileUtil.js index e2b2d601a..213448c80 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/fileUtil.js +++ b/node_modules/@puppeteer/browsers/lib/esm/fileUtil.js @@ -3,7 +3,7 @@ * Copyright 2023 Google Inc. 
* SPDX-License-Identifier: Apache-2.0 */ -import { exec as execChildProcess } from 'child_process'; +import { exec as execChildProcess, spawnSync } from 'child_process'; import { createReadStream } from 'fs'; import { mkdir, readdir } from 'fs/promises'; import * as path from 'path'; @@ -26,6 +26,17 @@ export async function unpackArchive(archivePath, folderPath) { await mkdir(folderPath); await installDMG(archivePath, folderPath); } + else if (archivePath.endsWith('.exe')) { + // Firefox on Windows. + const result = spawnSync(archivePath, [`/ExtractDir=${folderPath}`], { + env: { + __compat_layer: 'RunAsInvoker', + }, + }); + if (result.status !== 0) { + throw new Error(`Failed to extract ${archivePath} to ${folderPath}: ${result.output}`); + } + } else { throw new Error(`Unsupported archive format: ${archivePath}`); } diff --git a/node_modules/@puppeteer/browsers/lib/esm/httpUtil.js b/node_modules/@puppeteer/browsers/lib/esm/httpUtil.js index 10640affe..a5710dd38 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/httpUtil.js +++ b/node_modules/@puppeteer/browsers/lib/esm/httpUtil.js @@ -37,6 +37,9 @@ export function httpRequest(url, method, response, keepAlive = true) { res.statusCode < 400 && res.headers.location) { httpRequest(new URL(res.headers.location), method, response); + // consume response data to free up memory + // And prevents the connection from being kept alive + res.resume(); } else { response(res); diff --git a/node_modules/@puppeteer/browsers/lib/esm/install.js b/node_modules/@puppeteer/browsers/lib/esm/install.js index 356ea37f2..af03a730b 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/install.js +++ b/node_modules/@puppeteer/browsers/lib/esm/install.js @@ -8,12 +8,12 @@ import { existsSync } from 'fs'; import { mkdir, unlink } from 'fs/promises'; import os from 'os'; import path from 'path'; -import { downloadUrls, } from './browser-data/browser-data.js'; +import { Browser, BrowserPlatform, downloadUrls, } from 
'./browser-data/browser-data.js'; import { Cache, InstalledBrowser } from './Cache.js'; import { debug } from './debug.js'; import { detectBrowserPlatform } from './detectPlatform.js'; import { unpackArchive } from './fileUtil.js'; -import { downloadFile, headHttpRequest } from './httpUtil.js'; +import { downloadFile, getJSON, headHttpRequest } from './httpUtil.js'; const debugInstall = debug('puppeteer:browsers:install'); const times = new Map(); function debugTime(label) { @@ -35,6 +35,58 @@ export async function install(options) { throw new Error(`Cannot download a binary for the provided platform: ${os.platform()} (${os.arch()})`); } const url = getDownloadUrl(options.browser, options.platform, options.buildId, options.baseUrl); + try { + return await installUrl(url, options); + } + catch (err) { + // If custom baseUrl is provided, do not fall back to CfT dashboard. + if (options.baseUrl && !options.forceFallbackForTesting) { + throw err; + } + debugInstall(`Error downloading from ${url}.`); + switch (options.browser) { + case Browser.CHROME: + case Browser.CHROMEDRIVER: + case Browser.CHROMEHEADLESSSHELL: { + debugInstall(`Trying to find download URL via https://googlechromelabs.github.io/chrome-for-testing.`); + const version = (await getJSON(new URL(`https://googlechromelabs.github.io/chrome-for-testing/${options.buildId}.json`))); + let platform = ''; + switch (options.platform) { + case BrowserPlatform.LINUX: + platform = 'linux64'; + break; + case BrowserPlatform.MAC_ARM: + platform = 'mac-arm64'; + break; + case BrowserPlatform.MAC: + platform = 'mac-x64'; + break; + case BrowserPlatform.WIN32: + platform = 'win32'; + break; + case BrowserPlatform.WIN64: + platform = 'win64'; + break; + } + const url = version.downloads[options.browser]?.find(link => { + return link['platform'] === platform; + })?.url; + if (url) { + debugInstall(`Falling back to downloading from ${url}.`); + return await installUrl(new URL(url), options); + } + throw err; + } + default: 
+ throw err; + } + } +} +async function installUrl(url, options) { + options.platform ??= detectBrowserPlatform(); + if (!options.platform) { + throw new Error(`Cannot download a binary for the provided platform: ${os.platform()} (${os.arch()})`); + } const fileName = url.toString().split('/').pop(); assert(fileName, `A malformed download URL was found: ${url}.`); const cache = new Cache(options.cacheDir); @@ -54,10 +106,14 @@ export async function install(options) { return archivePath; } const outputPath = cache.installationDir(options.browser, options.platform, options.buildId); - if (existsSync(outputPath)) { - return new InstalledBrowser(cache, options.browser, options.buildId, options.platform); - } try { + if (existsSync(outputPath)) { + const installedBrowser = new InstalledBrowser(cache, options.browser, options.buildId, options.platform); + if (!existsSync(installedBrowser.executablePath)) { + throw new Error(`The browser folder (${outputPath}) exists but the executable (${installedBrowser.executablePath}) is missing`); + } + return installedBrowser; + } debugInstall(`Downloading binary from ${url}`); try { debugTime('download'); @@ -74,13 +130,19 @@ export async function install(options) { finally { debugTimeEnd('extract'); } + const installedBrowser = new InstalledBrowser(cache, options.browser, options.buildId, options.platform); + if (options.buildIdAlias) { + const metadata = installedBrowser.readMetadata(); + metadata.aliases[options.buildIdAlias] = options.buildId; + installedBrowser.writeMetadata(metadata); + } + return installedBrowser; } finally { if (existsSync(archivePath)) { await unlink(archivePath); } } - return new InstalledBrowser(cache, options.browser, options.buildId, options.platform); } /** * diff --git a/node_modules/@puppeteer/browsers/lib/esm/launch.js b/node_modules/@puppeteer/browsers/lib/esm/launch.js index 5f6cac9f6..d1d0b5bf2 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/launch.js +++ 
b/node_modules/@puppeteer/browsers/lib/esm/launch.js @@ -49,6 +49,49 @@ export const CDP_WEBSOCKET_ENDPOINT_REGEX = /^DevTools listening on (ws:\/\/.*)$ * @public */ export const WEBDRIVER_BIDI_WEBSOCKET_ENDPOINT_REGEX = /^WebDriver BiDi listening on (ws:\/\/.*)$/; +const processListeners = new Map(); +const dispatchers = { + exit: (...args) => { + processListeners.get('exit')?.forEach(handler => { + return handler(...args); + }); + }, + SIGINT: (...args) => { + processListeners.get('SIGINT')?.forEach(handler => { + return handler(...args); + }); + }, + SIGHUP: (...args) => { + processListeners.get('SIGHUP')?.forEach(handler => { + return handler(...args); + }); + }, + SIGTERM: (...args) => { + processListeners.get('SIGTERM')?.forEach(handler => { + return handler(...args); + }); + }, +}; +function subscribeToProcessEvent(event, handler) { + const listeners = processListeners.get(event) || []; + if (listeners.length === 0) { + process.on(event, dispatchers[event]); + } + listeners.push(handler); + processListeners.set(event, listeners); +} +function unsubscribeFromProcessEvent(event, handler) { + const listeners = processListeners.get(event) || []; + const existingListenerIdx = listeners.indexOf(handler); + if (existingListenerIdx === -1) { + return; + } + listeners.splice(existingListenerIdx, 1); + processListeners.set(event, listeners); + if (listeners.length === 0) { + process.off(event, dispatchers[event]); + } +} /** * @public */ @@ -101,15 +144,15 @@ export class Process { this.#browserProcess.stderr?.pipe(process.stderr); this.#browserProcess.stdout?.pipe(process.stdout); } - process.on('exit', this.#onDriverProcessExit); + subscribeToProcessEvent('exit', this.#onDriverProcessExit); if (opts.handleSIGINT) { - process.on('SIGINT', this.#onDriverProcessSignal); + subscribeToProcessEvent('SIGINT', this.#onDriverProcessSignal); } if (opts.handleSIGTERM) { - process.on('SIGTERM', this.#onDriverProcessSignal); + subscribeToProcessEvent('SIGTERM', 
this.#onDriverProcessSignal); } if (opts.handleSIGHUP) { - process.on('SIGHUP', this.#onDriverProcessSignal); + subscribeToProcessEvent('SIGHUP', this.#onDriverProcessSignal); } if (opts.onExit) { this.#onExitHook = opts.onExit; @@ -159,10 +202,10 @@ export class Process { } } #clearListeners() { - process.off('exit', this.#onDriverProcessExit); - process.off('SIGINT', this.#onDriverProcessSignal); - process.off('SIGTERM', this.#onDriverProcessSignal); - process.off('SIGHUP', this.#onDriverProcessSignal); + unsubscribeFromProcessEvent('exit', this.#onDriverProcessExit); + unsubscribeFromProcessEvent('SIGINT', this.#onDriverProcessSignal); + unsubscribeFromProcessEvent('SIGTERM', this.#onDriverProcessSignal); + unsubscribeFromProcessEvent('SIGHUP', this.#onDriverProcessSignal); } #onDriverProcessExit = (_code) => { this.kill(); diff --git a/node_modules/@puppeteer/browsers/lib/esm/main.js b/node_modules/@puppeteer/browsers/lib/esm/main.js index a2f70080d..22f36eedf 100644 --- a/node_modules/@puppeteer/browsers/lib/esm/main.js +++ b/node_modules/@puppeteer/browsers/lib/esm/main.js @@ -6,7 +6,7 @@ export { launch, computeExecutablePath, computeSystemExecutablePath, TimeoutError, CDP_WEBSOCKET_ENDPOINT_REGEX, WEBDRIVER_BIDI_WEBSOCKET_ENDPOINT_REGEX, Process, } from './launch.js'; export { install, getInstalledBrowsers, canDownload, uninstall, } from './install.js'; export { detectBrowserPlatform } from './detectPlatform.js'; -export { resolveBuildId, Browser, BrowserPlatform, ChromeReleaseChannel, createProfile, } from './browser-data/browser-data.js'; +export { resolveBuildId, Browser, BrowserPlatform, ChromeReleaseChannel, createProfile, getVersionComparator, } from './browser-data/browser-data.js'; export { CLI, makeProgressCallback } from './CLI.js'; export { Cache, InstalledBrowser } from './Cache.js'; //# sourceMappingURL=main.js.map \ No newline at end of file diff --git a/node_modules/@puppeteer/browsers/node_modules/debug/package.json 
b/node_modules/@puppeteer/browsers/node_modules/debug/package.json new file mode 100644 index 000000000..3bcdc242f --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/debug/package.json @@ -0,0 +1,59 @@ +{ + "name": "debug", + "version": "4.3.4", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon ", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "istanbul cover _mocha -- test.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "2.1.2" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "istanbul": "^0.4.5", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + } +} diff --git a/node_modules/@puppeteer/browsers/node_modules/debug/src/browser.js b/node_modules/@puppeteer/browsers/node_modules/debug/src/browser.js new file mode 100644 index 000000000..cd0fc35d1 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/debug/src/browser.js @@ -0,0 +1,269 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. 
+ */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. 
Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug'); + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. 
+ * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/@puppeteer/browsers/node_modules/debug/src/common.js b/node_modules/@puppeteer/browsers/node_modules/debug/src/common.js new file mode 100644 index 000000000..e3291b20f --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/debug/src/common.js @@ -0,0 +1,274 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) 
+ createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + let i; + const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/); + const len = split.length; + + for (i = 0; i < len; i++) { + if (!split[i]) { + // ignore empty strings + continue; + } + + namespaces = split[i].replace(/\*/g, '.*?'); + + if (namespaces[0] === '-') { + createDebug.skips.push(new RegExp('^' + namespaces.slice(1) + '$')); + } else { + createDebug.names.push(new RegExp('^' + namespaces + '$')); + } + } + } + + /** + * Disable debug output. 
+ * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names.map(toNamespace), + ...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + if (name[name.length - 1] === '*') { + return true; + } + + let i; + let len; + + for (i = 0, len = createDebug.skips.length; i < len; i++) { + if (createDebug.skips[i].test(name)) { + return false; + } + } + + for (i = 0, len = createDebug.names.length; i < len; i++) { + if (createDebug.names[i].test(name)) { + return true; + } + } + + return false; + } + + /** + * Convert regexp to namespace + * + * @param {RegExp} regxep + * @return {String} namespace + * @api private + */ + function toNamespace(regexp) { + return regexp.toString() + .substring(2, regexp.toString().length - 2) + .replace(/\.\*\?$/, '*'); + } + + /** + * Coerce `val`. + * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/@puppeteer/browsers/node_modules/debug/src/index.js b/node_modules/@puppeteer/browsers/node_modules/debug/src/index.js new file mode 100644 index 000000000..bf4c57f25 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/@puppeteer/browsers/node_modules/debug/src/node.js b/node_modules/@puppeteer/browsers/node_modules/debug/src/node.js new file mode 100644 index 000000000..79bc085cb --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? 
+ Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.format()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.format(...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. + */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. 
+ */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/@puppeteer/browsers/node_modules/lru-cache/index.js b/node_modules/@puppeteer/browsers/node_modules/lru-cache/index.js new file mode 100644 index 000000000..573b6b85b --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/lru-cache/index.js @@ -0,0 +1,334 @@ +'use strict' + +// A linked list to keep track of recently-used-ness +const Yallist = require('yallist') + +const MAX = Symbol('max') +const LENGTH = Symbol('length') +const LENGTH_CALCULATOR = Symbol('lengthCalculator') +const ALLOW_STALE = Symbol('allowStale') +const MAX_AGE = Symbol('maxAge') +const DISPOSE = Symbol('dispose') +const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet') +const LRU_LIST = Symbol('lruList') +const CACHE = Symbol('cache') +const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet') + +const naiveLength = () => 1 + +// lruList is a yallist where the head is the youngest +// item, and the tail is the oldest. the list contains the Hit +// objects as the entries. +// Each Hit object has a reference to its Yallist.Node. This +// never changes. +// +// cache is a Map (or PseudoMap) that matches the keys to +// the Yallist.Node object. +class LRUCache { + constructor (options) { + if (typeof options === 'number') + options = { max: options } + + if (!options) + options = {} + + if (options.max && (typeof options.max !== 'number' || options.max < 0)) + throw new TypeError('max must be a non-negative number') + // Kind of weird to have a default max of Infinity, but oh well. 
+ const max = this[MAX] = options.max || Infinity + + const lc = options.length || naiveLength + this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc + this[ALLOW_STALE] = options.stale || false + if (options.maxAge && typeof options.maxAge !== 'number') + throw new TypeError('maxAge must be a number') + this[MAX_AGE] = options.maxAge || 0 + this[DISPOSE] = options.dispose + this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false + this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false + this.reset() + } + + // resize the cache when the max changes. + set max (mL) { + if (typeof mL !== 'number' || mL < 0) + throw new TypeError('max must be a non-negative number') + + this[MAX] = mL || Infinity + trim(this) + } + get max () { + return this[MAX] + } + + set allowStale (allowStale) { + this[ALLOW_STALE] = !!allowStale + } + get allowStale () { + return this[ALLOW_STALE] + } + + set maxAge (mA) { + if (typeof mA !== 'number') + throw new TypeError('maxAge must be a non-negative number') + + this[MAX_AGE] = mA + trim(this) + } + get maxAge () { + return this[MAX_AGE] + } + + // resize the cache when the lengthCalculator changes. 
+ set lengthCalculator (lC) { + if (typeof lC !== 'function') + lC = naiveLength + + if (lC !== this[LENGTH_CALCULATOR]) { + this[LENGTH_CALCULATOR] = lC + this[LENGTH] = 0 + this[LRU_LIST].forEach(hit => { + hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key) + this[LENGTH] += hit.length + }) + } + trim(this) + } + get lengthCalculator () { return this[LENGTH_CALCULATOR] } + + get length () { return this[LENGTH] } + get itemCount () { return this[LRU_LIST].length } + + rforEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].tail; walker !== null;) { + const prev = walker.prev + forEachStep(this, fn, walker, thisp) + walker = prev + } + } + + forEach (fn, thisp) { + thisp = thisp || this + for (let walker = this[LRU_LIST].head; walker !== null;) { + const next = walker.next + forEachStep(this, fn, walker, thisp) + walker = next + } + } + + keys () { + return this[LRU_LIST].toArray().map(k => k.key) + } + + values () { + return this[LRU_LIST].toArray().map(k => k.value) + } + + reset () { + if (this[DISPOSE] && + this[LRU_LIST] && + this[LRU_LIST].length) { + this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value)) + } + + this[CACHE] = new Map() // hash of items by key + this[LRU_LIST] = new Yallist() // list of items in order of use recency + this[LENGTH] = 0 // length of items in the list + } + + dump () { + return this[LRU_LIST].map(hit => + isStale(this, hit) ? false : { + k: hit.key, + v: hit.value, + e: hit.now + (hit.maxAge || 0) + }).toArray().filter(h => h) + } + + dumpLru () { + return this[LRU_LIST] + } + + set (key, value, maxAge) { + maxAge = maxAge || this[MAX_AGE] + + if (maxAge && typeof maxAge !== 'number') + throw new TypeError('maxAge must be a number') + + const now = maxAge ? 
Date.now() : 0 + const len = this[LENGTH_CALCULATOR](value, key) + + if (this[CACHE].has(key)) { + if (len > this[MAX]) { + del(this, this[CACHE].get(key)) + return false + } + + const node = this[CACHE].get(key) + const item = node.value + + // dispose of the old one before overwriting + // split out into 2 ifs for better coverage tracking + if (this[DISPOSE]) { + if (!this[NO_DISPOSE_ON_SET]) + this[DISPOSE](key, item.value) + } + + item.now = now + item.maxAge = maxAge + item.value = value + this[LENGTH] += len - item.length + item.length = len + this.get(key) + trim(this) + return true + } + + const hit = new Entry(key, value, len, now, maxAge) + + // oversized objects fall out of cache automatically. + if (hit.length > this[MAX]) { + if (this[DISPOSE]) + this[DISPOSE](key, value) + + return false + } + + this[LENGTH] += hit.length + this[LRU_LIST].unshift(hit) + this[CACHE].set(key, this[LRU_LIST].head) + trim(this) + return true + } + + has (key) { + if (!this[CACHE].has(key)) return false + const hit = this[CACHE].get(key).value + return !isStale(this, hit) + } + + get (key) { + return get(this, key, true) + } + + peek (key) { + return get(this, key, false) + } + + pop () { + const node = this[LRU_LIST].tail + if (!node) + return null + + del(this, node) + return node.value + } + + del (key) { + del(this, this[CACHE].get(key)) + } + + load (arr) { + // reset the cache + this.reset() + + const now = Date.now() + // A previous serialized cache has the most recent items first + for (let l = arr.length - 1; l >= 0; l--) { + const hit = arr[l] + const expiresAt = hit.e || 0 + if (expiresAt === 0) + // the item was created without expiration in a non aged cache + this.set(hit.k, hit.v) + else { + const maxAge = expiresAt - now + // dont add already expired items + if (maxAge > 0) { + this.set(hit.k, hit.v, maxAge) + } + } + } + } + + prune () { + this[CACHE].forEach((value, key) => get(this, key, false)) + } +} + +const get = (self, key, doUse) => { + const node = 
self[CACHE].get(key) + if (node) { + const hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + return undefined + } else { + if (doUse) { + if (self[UPDATE_AGE_ON_GET]) + node.value.now = Date.now() + self[LRU_LIST].unshiftNode(node) + } + } + return hit.value + } +} + +const isStale = (self, hit) => { + if (!hit || (!hit.maxAge && !self[MAX_AGE])) + return false + + const diff = Date.now() - hit.now + return hit.maxAge ? diff > hit.maxAge + : self[MAX_AGE] && (diff > self[MAX_AGE]) +} + +const trim = self => { + if (self[LENGTH] > self[MAX]) { + for (let walker = self[LRU_LIST].tail; + self[LENGTH] > self[MAX] && walker !== null;) { + // We know that we're about to delete this one, and also + // what the next least recently used key will be, so just + // go ahead and set it now. + const prev = walker.prev + del(self, walker) + walker = prev + } + } +} + +const del = (self, node) => { + if (node) { + const hit = node.value + if (self[DISPOSE]) + self[DISPOSE](hit.key, hit.value) + + self[LENGTH] -= hit.length + self[CACHE].delete(hit.key) + self[LRU_LIST].removeNode(node) + } +} + +class Entry { + constructor (key, value, length, now, maxAge) { + this.key = key + this.value = value + this.length = length + this.now = now + this.maxAge = maxAge || 0 + } +} + +const forEachStep = (self, fn, node, thisp) => { + let hit = node.value + if (isStale(self, hit)) { + del(self, node) + if (!self[ALLOW_STALE]) + hit = undefined + } + if (hit) + fn.call(thisp, hit.value, hit.key, self) +} + +module.exports = LRUCache diff --git a/node_modules/@puppeteer/browsers/node_modules/lru-cache/package.json b/node_modules/@puppeteer/browsers/node_modules/lru-cache/package.json new file mode 100644 index 000000000..43b7502c3 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/lru-cache/package.json @@ -0,0 +1,34 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": 
"6.0.0", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "main": "index.js", + "repository": "git://github.com/isaacs/node-lru-cache.git", + "devDependencies": { + "benchmark": "^2.1.4", + "tap": "^14.10.7" + }, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "files": [ + "index.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/bin/semver.js b/node_modules/@puppeteer/browsers/node_modules/semver/bin/semver.js new file mode 100755 index 000000000..242b7ade7 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/bin/semver.js @@ -0,0 +1,197 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +let identifierBase + +const semver = require('../') +const parseOptions = require('../internal/parse-options') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-n': + identifierBase = argv.shift() + if (identifierBase === 'false') { + identifierBase = false + } + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = parseOptions({ loose, includePrerelease, rtl }) + + versions = versions.map((v) => { + return coerce ? 
(semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + return success(versions) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const success = () => { + const compare = reverse ? 'rcompare' : 'compare' + versions.sort((a, b) => { + return semver[compare](a, b, options) + }).map((v) => { + return semver.clean(v, options) + }).map((v) => { + return inc ? semver.inc(v, inc, options, identifier, identifierBase) : v + }).forEach((v, i, _) => { + console.log(v) + }) +} + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +-n + Base number to be used for the prerelease identifier. 
+ Can be either 0 or 1, or false to omit the number altogether. + Defaults to 0. + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/classes/comparator.js b/node_modules/@puppeteer/browsers/node_modules/semver/classes/comparator.js new file mode 100644 index 000000000..3d39c0eef --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/classes/comparator.js @@ -0,0 +1,141 @@ +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && 
comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { safeRe: re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/classes/index.js b/node_modules/@puppeteer/browsers/node_modules/semver/classes/index.js new file mode 100644 index 000000000..5e3f5c9b1 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/classes/index.js @@ -0,0 +1,5 @@ +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/classes/range.js b/node_modules/@puppeteer/browsers/node_modules/semver/classes/range.js new file mode 100644 index 000000000..7e7c41410 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/classes/range.js @@ -0,0 +1,539 @@ +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.format() + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
+ this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.format() + } + + format () { + this.range = this.set + .map((comps) => comps.join(' ').trim()) + .join('||') + .trim() + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? 
re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && 
+ thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = require('lru-cache') +const cache = new LRU({ max: 1000 }) + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, options) => { + debug('caret', comp, 
options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? 
'-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? 
'-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } + + return true +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/classes/semver.js b/node_modules/@puppeteer/browsers/node_modules/semver/classes/semver.js new file mode 100644 index 000000000..84e84590e --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/classes/semver.js @@ -0,0 +1,302 @@ +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { safeRe: re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return ( + compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) + ) + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if 
(!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. 
+ case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 
1 : 0 + + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/clean.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/clean.js new file mode 100644 index 000000000..811fe6b82 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/clean.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/cmp.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/cmp.js new file mode 100644 index 000000000..401190947 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/cmp.js @@ -0,0 +1,52 @@ +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/coerce.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/coerce.js new file mode 100644 index 000000000..b378dcea4 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/coerce.js @@ -0,0 +1,60 @@ +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { safeRe: re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = 
version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. + // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare-build.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare-build.js new file mode 100644 index 000000000..9eb881bef --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare-build.js @@ -0,0 +1,7 @@ +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare-loose.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare-loose.js new file mode 100644 index 000000000..4881fbe00 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare.js new file mode 100644 index 000000000..748b7afa5 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/compare.js @@ -0,0 +1,5 @@ +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/diff.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/diff.js new file mode 100644 index 000000000..fc224e302 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/diff.js @@ -0,0 +1,65 @@ +const 
parse = require('./parse.js') + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // Otherwise it can be determined by checking the high version + + if (highVersion.patch) { + // anything higher than a patch bump would result in the wrong version + return 'patch' + } + + if (highVersion.minor) { + // anything higher than a minor bump would result in the wrong version + return 'minor' + } + + // bumping major/minor/patch all have same result + return 'major' + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 
'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/eq.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/eq.js new file mode 100644 index 000000000..271fed976 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/eq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/gt.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/gt.js new file mode 100644 index 000000000..d9b2156d8 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/gt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/gte.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/gte.js new file mode 100644 index 000000000..5aeaa6347 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/gte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 0 +module.exports = gte diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/inc.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/inc.js new file mode 100644 index 000000000..7670b1bea --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/inc.js @@ -0,0 +1,19 @@ +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof 
(options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/lt.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/lt.js new file mode 100644 index 000000000..b440ab7d4 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/lt.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/lte.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/lte.js new file mode 100644 index 000000000..6dcc95650 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/lte.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/major.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/major.js new file mode 100644 index 000000000..4283165e9 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/major.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/minor.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/minor.js new file mode 100644 index 000000000..57b3455f8 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/minor.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const minor = (a, loose) => new 
SemVer(a, loose).minor +module.exports = minor diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/neq.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/neq.js new file mode 100644 index 000000000..f944c0157 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/neq.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/parse.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/parse.js new file mode 100644 index 000000000..459b3b173 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/parse.js @@ -0,0 +1,16 @@ +const SemVer = require('../classes/semver') +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/patch.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/patch.js new file mode 100644 index 000000000..63afca252 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/patch.js @@ -0,0 +1,3 @@ +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/prerelease.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/prerelease.js new file mode 100644 index 000000000..06aa13248 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/prerelease.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return 
(parsed && parsed.prerelease.length) ? parsed.prerelease : null +} +module.exports = prerelease diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/rcompare.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/rcompare.js new file mode 100644 index 000000000..0ac509e79 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/rcompare.js @@ -0,0 +1,3 @@ +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/rsort.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/rsort.js new file mode 100644 index 000000000..82404c5cf --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/rsort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/satisfies.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/satisfies.js new file mode 100644 index 000000000..50af1c199 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/satisfies.js @@ -0,0 +1,10 @@ +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/sort.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/sort.js new file mode 100644 index 000000000..4d10917ab --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/sort.js @@ -0,0 +1,3 @@ +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, 
loose)) +module.exports = sort diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/functions/valid.js b/node_modules/@puppeteer/browsers/node_modules/semver/functions/valid.js new file mode 100644 index 000000000..f27bae107 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/functions/valid.js @@ -0,0 +1,6 @@ +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? v.version : null +} +module.exports = valid diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/index.js b/node_modules/@puppeteer/browsers/node_modules/semver/index.js new file mode 100644 index 000000000..86d42ac16 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/index.js @@ -0,0 +1,89 @@ +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const 
cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/internal/constants.js b/node_modules/@puppeteer/browsers/node_modules/semver/internal/constants.js new file mode 100644 index 000000000..94be1c570 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/internal/constants.js @@ -0,0 +1,35 @@ +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. 
+const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. +const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/internal/debug.js b/node_modules/@puppeteer/browsers/node_modules/semver/internal/debug.js new file mode 100644 index 000000000..1c00e1369 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/internal/debug.js @@ -0,0 +1,9 @@ +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/internal/identifiers.js b/node_modules/@puppeteer/browsers/node_modules/semver/internal/identifiers.js new file mode 100644 index 000000000..e612d0a3d --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/internal/identifiers.js @@ -0,0 +1,23 @@ +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/internal/parse-options.js b/node_modules/@puppeteer/browsers/node_modules/semver/internal/parse-options.js new file mode 100644 index 000000000..10d64ce06 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/internal/parse-options.js @@ -0,0 +1,15 @@ +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/internal/re.js b/node_modules/@puppeteer/browsers/node_modules/semver/internal/re.js new file mode 100644 index 000000000..fd8920e7b --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/internal/re.js @@ -0,0 +1,217 @@ +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. 
+const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NUMERICIDENTIFIER] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NUMERICIDENTIFIERLOOSE] +}|${src[t.NONNUMERICIDENTIFIER]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. 
+ +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. 
+createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. 
+// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. 
+createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/package.json b/node_modules/@puppeteer/browsers/node_modules/semver/package.json new file mode 100644 index 000000000..f00c6bdda --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/package.json @@ -0,0 +1,78 @@ +{ + "name": "semver", + "version": "7.6.0", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "timeout": 30, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.21.3", + "engines": ">=10", + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf" + ], + "publish": "true" + } +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/preload.js b/node_modules/@puppeteer/browsers/node_modules/semver/preload.js new file mode 100644 index 000000000..947cd4f79 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/preload.js @@ -0,0 +1,2 @@ +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/range.bnf b/node_modules/@puppeteer/browsers/node_modules/semver/range.bnf new file mode 100644 index 000000000..d4c6ae0d7 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/gtr.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/gtr.js new file mode 100644 index 000000000..db7e35599 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/gtr.js @@ -0,0 +1,4 @@ +// Determine if version is greater than all the versions possible in the range. 
+const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/intersects.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/intersects.js new file mode 100644 index 000000000..e0e9b7ce0 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/intersects.js @@ -0,0 +1,7 @@ +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/ltr.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/ltr.js new file mode 100644 index 000000000..528a885eb --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/ltr.js @@ -0,0 +1,4 @@ +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/max-satisfying.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 000000000..6e3d993c6 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,25 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new 
SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/min-satisfying.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 000000000..9b60974e2 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,24 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/min-version.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/min-version.js new file mode 100644 index 000000000..350e1f783 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/min-version.js @@ -0,0 +1,61 @@ +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/outside.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/outside.js new file mode 100644 index 000000000..ae99b10a5 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/outside.js @@ -0,0 +1,80 @@ +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable 
terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/simplify.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/simplify.js new file mode 100644 index 000000000..618d5b627 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/simplify.js @@ -0,0 +1,47 @@ +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/subset.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/subset.js new file mode 100644 index 000000000..1e5c26837 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/subset.js @@ -0,0 +1,247 @@ +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return 
false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false +// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = 
compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp === 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre 
= false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? 
b + : a +} + +module.exports = subset diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/to-comparators.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 000000000..6c8bc7e6f --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,8 @@ +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/node_modules/@puppeteer/browsers/node_modules/semver/ranges/valid.js b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/valid.js new file mode 100644 index 000000000..365f35689 --- /dev/null +++ b/node_modules/@puppeteer/browsers/node_modules/semver/ranges/valid.js @@ -0,0 +1,11 @@ +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. 
+ // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/node_modules/@puppeteer/browsers/package.json b/node_modules/@puppeteer/browsers/package.json index 7c80404c6..548560624 100644 --- a/node_modules/@puppeteer/browsers/package.json +++ b/node_modules/@puppeteer/browsers/package.json @@ -1,12 +1,12 @@ { "name": "@puppeteer/browsers", - "version": "1.9.1", + "version": "2.2.3", "description": "Download and launch browsers", "scripts": { "build:docs": "wireit", "build": "wireit", "build:test": "wireit", - "clean": "../../tools/clean.js", + "clean": "../../tools/clean.mjs", "test": "wireit" }, "type": "commonjs", @@ -67,7 +67,7 @@ ] }, "test": { - "command": "node tools/downloadTestBrowsers.mjs && cross-env DEBUG=puppeteer:* mocha", + "command": "node tools/downloadTestBrowsers.mjs && mocha", "files": [ ".mocharc.cjs" ], @@ -87,7 +87,7 @@ "author": "The Chromium Authors", "license": "Apache-2.0", "engines": { - "node": ">=16.3.0" + "node": ">=18" }, "files": [ "lib", @@ -98,10 +98,11 @@ "debug": "4.3.4", "extract-zip": "2.0.1", "progress": "2.0.3", - "proxy-agent": "6.3.1", - "tar-fs": "3.0.4", + "proxy-agent": "6.4.0", + "tar-fs": "3.0.5", "unbzip2-stream": "1.4.3", - "yargs": "17.7.2" + "yargs": "17.7.2", + "semver": "7.6.0" }, "devDependencies": { "@types/debug": "4.1.12", diff --git a/node_modules/https-proxy-agent/node_modules/agent-base/dist/src/index.js b/node_modules/@sentry/node/node_modules/agent-base/dist/src/index.js similarity index 100% rename from node_modules/https-proxy-agent/node_modules/agent-base/dist/src/index.js rename to node_modules/@sentry/node/node_modules/agent-base/dist/src/index.js diff --git a/node_modules/https-proxy-agent/node_modules/agent-base/dist/src/promisify.js b/node_modules/@sentry/node/node_modules/agent-base/dist/src/promisify.js similarity index 100% rename from 
node_modules/https-proxy-agent/node_modules/agent-base/dist/src/promisify.js rename to node_modules/@sentry/node/node_modules/agent-base/dist/src/promisify.js diff --git a/node_modules/https-proxy-agent/node_modules/agent-base/package.json b/node_modules/@sentry/node/node_modules/agent-base/package.json similarity index 100% rename from node_modules/https-proxy-agent/node_modules/agent-base/package.json rename to node_modules/@sentry/node/node_modules/agent-base/package.json diff --git a/node_modules/https-proxy-agent/dist/agent.js b/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/agent.js similarity index 100% rename from node_modules/https-proxy-agent/dist/agent.js rename to node_modules/@sentry/node/node_modules/https-proxy-agent/dist/agent.js diff --git a/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/index.js b/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/index.js new file mode 100644 index 000000000..b03e7631a --- /dev/null +++ b/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/parse-proxy-response.js new file mode 100644 index 000000000..aa5ce3cc2 --- /dev/null +++ b/node_modules/@sentry/node/node_modules/https-proxy-agent/dist/parse-proxy-response.js @@ -0,0 +1,66 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const debug_1 = __importDefault(require("debug")); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... 
+ let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports.default = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/node_modules/@sentry/node/node_modules/https-proxy-agent/package.json b/node_modules/@sentry/node/node_modules/https-proxy-agent/package.json new file mode 100644 index 000000000..fb2aba1b9 --- /dev/null +++ b/node_modules/@sentry/node/node_modules/https-proxy-agent/package.json @@ -0,0 +1,56 @@ +{ + "name": "https-proxy-agent", + "version": "5.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", + "main": "dist/index", + "types": "dist/index", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha --reporter spec", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run 
build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-https-proxy-agent.git" + }, + "keywords": [ + "https", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-https-proxy-agent/issues" + }, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^3.5.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/@types/node/package.json b/node_modules/@types/node/package.json index fe06840a3..31d9e02bf 100644 --- a/node_modules/@types/node/package.json +++ b/node_modules/@types/node/package.json @@ -1,6 +1,6 @@ { "name": "@types/node", - "version": "20.12.3", + "version": "20.14.7", "description": "TypeScript definitions for node", "homepage": "https://github.com/DefinitelyTyped/DefinitelyTyped/tree/master/types/node", "license": "MIT", @@ -212,6 +212,6 @@ "dependencies": { "undici-types": "~5.26.4" }, - "typesPublisherContentHash": "02d120a0066fb0a44108576dbebd52092ec7972656913aa42b204107e43f732a", + "typesPublisherContentHash": "e79282850f387d1bca6eea06eccb5f41cc0418e1cdf8351686abc23d073af52d", "typeScriptVersion": "4.7" } \ No newline at end of file diff --git a/node_modules/abort-controller/browser.js b/node_modules/abort-controller/browser.js new file mode 100644 index 000000000..b0c5ec37d --- /dev/null +++ b/node_modules/abort-controller/browser.js @@ -0,0 +1,13 @@ +/*globals self, window */ +"use strict" + 
+/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +module.exports = AbortController +module.exports.AbortSignal = AbortSignal +module.exports.default = AbortController diff --git a/node_modules/abort-controller/browser.mjs b/node_modules/abort-controller/browser.mjs new file mode 100644 index 000000000..a8f321afe --- /dev/null +++ b/node_modules/abort-controller/browser.mjs @@ -0,0 +1,11 @@ +/*globals self, window */ + +/*eslint-disable @mysticatea/prettier */ +const { AbortController, AbortSignal } = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +export default AbortController +export { AbortController, AbortSignal } diff --git a/node_modules/abort-controller/dist/abort-controller.js b/node_modules/abort-controller/dist/abort-controller.js new file mode 100644 index 000000000..49af73955 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.js @@ -0,0 +1,127 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +var eventTargetShim = require('event-target-shim'); + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends eventTargetShim.EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. 
+ */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); + } + return aborted; + } +} +eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + eventTargetShim.EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. 
+ */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. +Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +exports.AbortController = AbortController; +exports.AbortSignal = AbortSignal; +exports.default = AbortController; + +module.exports = AbortController +module.exports.AbortController = module.exports["default"] = AbortController +module.exports.AbortSignal = AbortSignal +//# sourceMappingURL=abort-controller.js.map diff --git a/node_modules/abort-controller/dist/abort-controller.mjs b/node_modules/abort-controller/dist/abort-controller.mjs new file mode 100644 index 000000000..88ba22d55 --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.mjs @@ -0,0 +1,118 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */ +import { EventTarget, defineEventAttribute } from 'event-target-shim'; + +/** + * The signal class. + * @see https://dom.spec.whatwg.org/#abortsignal + */ +class AbortSignal extends EventTarget { + /** + * AbortSignal cannot be constructed directly. + */ + constructor() { + super(); + throw new TypeError("AbortSignal cannot be constructed directly"); + } + /** + * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. + */ + get aborted() { + const aborted = abortedFlags.get(this); + if (typeof aborted !== "boolean") { + throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? 
"null" : typeof this}`); + } + return aborted; + } +} +defineEventAttribute(AbortSignal.prototype, "abort"); +/** + * Create an AbortSignal object. + */ +function createAbortSignal() { + const signal = Object.create(AbortSignal.prototype); + EventTarget.call(signal); + abortedFlags.set(signal, false); + return signal; +} +/** + * Abort a given signal. + */ +function abortSignal(signal) { + if (abortedFlags.get(signal) !== false) { + return; + } + abortedFlags.set(signal, true); + signal.dispatchEvent({ type: "abort" }); +} +/** + * Aborted flag for each instances. + */ +const abortedFlags = new WeakMap(); +// Properties should be enumerable. +Object.defineProperties(AbortSignal.prototype, { + aborted: { enumerable: true }, +}); +// `toString()` should return `"[object AbortSignal]"` +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortSignal", + }); +} + +/** + * The AbortController. + * @see https://dom.spec.whatwg.org/#abortcontroller + */ +class AbortController { + /** + * Initialize this controller. + */ + constructor() { + signals.set(this, createAbortSignal()); + } + /** + * Returns the `AbortSignal` object associated with this object. + */ + get signal() { + return getSignal(this); + } + /** + * Abort and signal to any observers that the associated activity is to be aborted. + */ + abort() { + abortSignal(getSignal(this)); + } +} +/** + * Associated signals. + */ +const signals = new WeakMap(); +/** + * Get the associated signal of a given controller. + */ +function getSignal(controller) { + const signal = signals.get(controller); + if (signal == null) { + throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); + } + return signal; +} +// Properties should be enumerable. 
+Object.defineProperties(AbortController.prototype, { + signal: { enumerable: true }, + abort: { enumerable: true }, +}); +if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { + Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { + configurable: true, + value: "AbortController", + }); +} + +export default AbortController; +export { AbortController, AbortSignal }; +//# sourceMappingURL=abort-controller.mjs.map diff --git a/node_modules/abort-controller/dist/abort-controller.umd.js b/node_modules/abort-controller/dist/abort-controller.umd.js new file mode 100644 index 000000000..f643cfd6b --- /dev/null +++ b/node_modules/abort-controller/dist/abort-controller.umd.js @@ -0,0 +1,5 @@ +/** + * @author Toru Nagashima + * See LICENSE file in root directory for full license. + */(function(a,b){"object"==typeof exports&&"undefined"!=typeof module?b(exports):"function"==typeof define&&define.amd?define(["exports"],b):(a=a||self,b(a.AbortControllerShim={}))})(this,function(a){'use strict';function b(a){return b="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(a){return typeof a}:function(a){return a&&"function"==typeof Symbol&&a.constructor===Symbol&&a!==Symbol.prototype?"symbol":typeof a},b(a)}function c(a,b){if(!(a instanceof b))throw new TypeError("Cannot call a class as a function")}function d(a,b){for(var c,d=0;d=6.5" + }, + "dependencies": { + "event-target-shim": "^5.0.0" + }, + "browser": "./browser.js", + "devDependencies": { + "@babel/core": "^7.2.2", + "@babel/plugin-transform-modules-commonjs": "^7.2.0", + "@babel/preset-env": "^7.3.0", + "@babel/register": "^7.0.0", + "@mysticatea/eslint-plugin": "^8.0.1", + "@mysticatea/spy": "^0.1.2", + "@types/mocha": "^5.2.5", + "@types/node": "^10.12.18", + "assert": "^1.4.1", + "codecov": "^3.1.0", + "dts-bundle-generator": "^2.0.0", + "eslint": "^5.12.1", + "karma": "^3.1.4", + "karma-chrome-launcher": "^2.2.0", + "karma-coverage": "^1.1.2", + 
"karma-firefox-launcher": "^1.1.0", + "karma-growl-reporter": "^1.0.0", + "karma-ie-launcher": "^1.0.0", + "karma-mocha": "^1.3.0", + "karma-rollup-preprocessor": "^7.0.0-rc.2", + "mocha": "^5.2.0", + "npm-run-all": "^4.1.5", + "nyc": "^13.1.0", + "opener": "^1.5.1", + "rimraf": "^2.6.3", + "rollup": "^1.1.2", + "rollup-plugin-babel": "^4.3.2", + "rollup-plugin-babel-minify": "^7.0.0", + "rollup-plugin-commonjs": "^9.2.0", + "rollup-plugin-node-resolve": "^4.0.0", + "rollup-plugin-sourcemaps": "^0.4.2", + "rollup-plugin-typescript": "^1.0.0", + "rollup-watch": "^4.3.1", + "ts-node": "^8.0.1", + "type-tester": "^1.0.0", + "typescript": "^3.2.4" + }, + "scripts": { + "preversion": "npm test", + "version": "npm run -s build && git add dist/*", + "postversion": "git push && git push --tags", + "clean": "rimraf .nyc_output coverage", + "coverage": "opener coverage/lcov-report/index.html", + "lint": "eslint . --ext .ts", + "build": "run-s -s build:*", + "build:rollup": "rollup -c", + "build:dts": "dts-bundle-generator -o dist/abort-controller.d.ts src/abort-controller.ts && ts-node scripts/fix-dts", + "test": "run-s -s lint test:*", + "test:mocha": "nyc mocha test/*.ts", + "test:karma": "karma start --single-run", + "watch": "run-p -s watch:*", + "watch:mocha": "mocha test/*.ts --require ts-node/register --watch-extensions ts --watch --growl", + "watch:karma": "karma start --watch", + "codecov": "codecov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/mysticatea/abort-controller.git" + }, + "keywords": [ + "w3c", + "whatwg", + "event", + "events", + "abort", + "cancel", + "abortcontroller", + "abortsignal", + "controller", + "signal", + "shim" + ], + "author": "Toru Nagashima (https://github.com/mysticatea)", + "license": "MIT", + "bugs": { + "url": "https://github.com/mysticatea/abort-controller/issues" + }, + "homepage": "https://github.com/mysticatea/abort-controller#readme" +} diff --git a/node_modules/abort-controller/polyfill.js 
b/node_modules/abort-controller/polyfill.js new file mode 100644 index 000000000..3ca892330 --- /dev/null +++ b/node_modules/abort-controller/polyfill.js @@ -0,0 +1,21 @@ +/*globals require, self, window */ +"use strict" + +const ac = require("./dist/abort-controller") + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/abort-controller/polyfill.mjs b/node_modules/abort-controller/polyfill.mjs new file mode 100644 index 000000000..0602a64dd --- /dev/null +++ b/node_modules/abort-controller/polyfill.mjs @@ -0,0 +1,19 @@ +/*globals self, window */ +import * as ac from "./dist/abort-controller" + +/*eslint-disable @mysticatea/prettier */ +const g = + typeof self !== "undefined" ? self : + typeof window !== "undefined" ? window : + typeof global !== "undefined" ? global : + /* otherwise */ undefined +/*eslint-enable @mysticatea/prettier */ + +if (g) { + if (typeof g.AbortController === "undefined") { + g.AbortController = ac.AbortController + } + if (typeof g.AbortSignal === "undefined") { + g.AbortSignal = ac.AbortSignal + } +} diff --git a/node_modules/archiver-utils/file.js b/node_modules/archiver-utils/file.js index 912f25eb3..e507bceac 100644 --- a/node_modules/archiver-utils/file.js +++ b/node_modules/archiver-utils/file.js @@ -1,209 +1,209 @@ -/** - * archiver-utils - * - * Copyright (c) 2012-2014 Chris Talkington, contributors. - * Licensed under the MIT license. 
- * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT - */ -var fs = require('graceful-fs'); -var path = require('path'); - -var flatten = require('lodash.flatten'); -var difference = require('lodash.difference'); -var union = require('lodash.union'); -var isPlainObject = require('lodash.isplainobject'); - -var glob = require('glob'); - -var file = module.exports = {}; - -var pathSeparatorRe = /[\/\\]/g; - -// Process specified wildcard glob patterns or filenames against a -// callback, excluding and uniquing files in the result set. -var processPatterns = function(patterns, fn) { - // Filepaths to return. - var result = []; - // Iterate over flattened patterns array. - flatten(patterns).forEach(function(pattern) { - // If the first character is ! it should be omitted - var exclusion = pattern.indexOf('!') === 0; - // If the pattern is an exclusion, remove the ! - if (exclusion) { pattern = pattern.slice(1); } - // Find all matching files for this pattern. - var matches = fn(pattern); - if (exclusion) { - // If an exclusion, remove matching files. - result = difference(result, matches); - } else { - // Otherwise add matching files. - result = union(result, matches); - } - }); - return result; -}; - -// True if the file path exists. -file.exists = function() { - var filepath = path.join.apply(path, arguments); - return fs.existsSync(filepath); -}; - -// Return an array of all file paths that match the given wildcard patterns. -file.expand = function(...args) { - // If the first argument is an options object, save those options to pass - // into the File.prototype.glob.sync method. - var options = isPlainObject(args[0]) ? args.shift() : {}; - // Use the first argument if it's an Array, otherwise convert the arguments - // object to an array and use that. - var patterns = Array.isArray(args[0]) ? args[0] : args; - // Return empty set if there are no patterns or filepaths. - if (patterns.length === 0) { return []; } - // Return all matching filepaths. 
- var matches = processPatterns(patterns, function(pattern) { - // Find all matching files for this pattern. - return glob.sync(pattern, options); - }); - // Filter result set? - if (options.filter) { - matches = matches.filter(function(filepath) { - filepath = path.join(options.cwd || '', filepath); - try { - if (typeof options.filter === 'function') { - return options.filter(filepath); - } else { - // If the file is of the right type and exists, this should work. - return fs.statSync(filepath)[options.filter](); - } - } catch(e) { - // Otherwise, it's probably not the right type. - return false; - } - }); - } - return matches; -}; - -// Build a multi task "files" object dynamically. -file.expandMapping = function(patterns, destBase, options) { - options = Object.assign({ - rename: function(destBase, destPath) { - return path.join(destBase || '', destPath); - } - }, options); - var files = []; - var fileByDest = {}; - // Find all files matching pattern, using passed-in options. - file.expand(options, patterns).forEach(function(src) { - var destPath = src; - // Flatten? - if (options.flatten) { - destPath = path.basename(destPath); - } - // Change the extension? - if (options.ext) { - destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); - } - // Generate destination filename. - var dest = options.rename(destBase, destPath, options); - // Prepend cwd to src path if necessary. - if (options.cwd) { src = path.join(options.cwd, src); } - // Normalize filepaths to be unix-style. - dest = dest.replace(pathSeparatorRe, '/'); - src = src.replace(pathSeparatorRe, '/'); - // Map correct src path to dest path. - if (fileByDest[dest]) { - // If dest already exists, push this src onto that dest's src array. - fileByDest[dest].src.push(src); - } else { - // Otherwise create a new src-dest file mapping object. - files.push({ - src: [src], - dest: dest, - }); - // And store a reference for later use. 
- fileByDest[dest] = files[files.length - 1]; - } - }); - return files; -}; - -// reusing bits of grunt's multi-task source normalization -file.normalizeFilesArray = function(data) { - var files = []; - - data.forEach(function(obj) { - var prop; - if ('src' in obj || 'dest' in obj) { - files.push(obj); - } - }); - - if (files.length === 0) { - return []; - } - - files = _(files).chain().forEach(function(obj) { - if (!('src' in obj) || !obj.src) { return; } - // Normalize .src properties to flattened array. - if (Array.isArray(obj.src)) { - obj.src = flatten(obj.src); - } else { - obj.src = [obj.src]; - } - }).map(function(obj) { - // Build options object, removing unwanted properties. - var expandOptions = Object.assign({}, obj); - delete expandOptions.src; - delete expandOptions.dest; - - // Expand file mappings. - if (obj.expand) { - return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) { - // Copy obj properties to result. - var result = Object.assign({}, obj); - // Make a clone of the orig obj available. - result.orig = Object.assign({}, obj); - // Set .src and .dest, processing both as templates. - result.src = mapObj.src; - result.dest = mapObj.dest; - // Remove unwanted properties. - ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) { - delete result[prop]; - }); - return result; - }); - } - - // Copy obj properties to result, adding an .orig property. - var result = Object.assign({}, obj); - // Make a clone of the orig obj available. - result.orig = Object.assign({}, obj); - - if ('src' in result) { - // Expose an expand-on-demand getter method as .src. - Object.defineProperty(result, 'src', { - enumerable: true, - get: function fn() { - var src; - if (!('result' in fn)) { - src = obj.src; - // If src is an array, flatten it. Otherwise, make it into an array. - src = Array.isArray(src) ? flatten(src) : [src]; - // Expand src files, memoizing result. 
- fn.result = file.expand(expandOptions, src); - } - return fn.result; - } - }); - } - - if ('dest' in result) { - result.dest = obj.dest; - } - - return result; - }).flatten().value(); - - return files; -}; +/** + * archiver-utils + * + * Copyright (c) 2012-2014 Chris Talkington, contributors. + * Licensed under the MIT license. + * https://github.com/archiverjs/node-archiver/blob/master/LICENSE-MIT + */ +var fs = require('graceful-fs'); +var path = require('path'); + +var flatten = require('lodash/flatten'); +var difference = require('lodash/difference'); +var union = require('lodash/union'); +var isPlainObject = require('lodash/isPlainObject'); + +var glob = require('glob'); + +var file = module.exports = {}; + +var pathSeparatorRe = /[\/\\]/g; + +// Process specified wildcard glob patterns or filenames against a +// callback, excluding and uniquing files in the result set. +var processPatterns = function(patterns, fn) { + // Filepaths to return. + var result = []; + // Iterate over flattened patterns array. + flatten(patterns).forEach(function(pattern) { + // If the first character is ! it should be omitted + var exclusion = pattern.indexOf('!') === 0; + // If the pattern is an exclusion, remove the ! + if (exclusion) { pattern = pattern.slice(1); } + // Find all matching files for this pattern. + var matches = fn(pattern); + if (exclusion) { + // If an exclusion, remove matching files. + result = difference(result, matches); + } else { + // Otherwise add matching files. + result = union(result, matches); + } + }); + return result; +}; + +// True if the file path exists. +file.exists = function() { + var filepath = path.join.apply(path, arguments); + return fs.existsSync(filepath); +}; + +// Return an array of all file paths that match the given wildcard patterns. +file.expand = function(...args) { + // If the first argument is an options object, save those options to pass + // into the File.prototype.glob.sync method. + var options = isPlainObject(args[0]) ? 
args.shift() : {}; + // Use the first argument if it's an Array, otherwise convert the arguments + // object to an array and use that. + var patterns = Array.isArray(args[0]) ? args[0] : args; + // Return empty set if there are no patterns or filepaths. + if (patterns.length === 0) { return []; } + // Return all matching filepaths. + var matches = processPatterns(patterns, function(pattern) { + // Find all matching files for this pattern. + return glob.sync(pattern, options); + }); + // Filter result set? + if (options.filter) { + matches = matches.filter(function(filepath) { + filepath = path.join(options.cwd || '', filepath); + try { + if (typeof options.filter === 'function') { + return options.filter(filepath); + } else { + // If the file is of the right type and exists, this should work. + return fs.statSync(filepath)[options.filter](); + } + } catch(e) { + // Otherwise, it's probably not the right type. + return false; + } + }); + } + return matches; +}; + +// Build a multi task "files" object dynamically. +file.expandMapping = function(patterns, destBase, options) { + options = Object.assign({ + rename: function(destBase, destPath) { + return path.join(destBase || '', destPath); + } + }, options); + var files = []; + var fileByDest = {}; + // Find all files matching pattern, using passed-in options. + file.expand(options, patterns).forEach(function(src) { + var destPath = src; + // Flatten? + if (options.flatten) { + destPath = path.basename(destPath); + } + // Change the extension? + if (options.ext) { + destPath = destPath.replace(/(\.[^\/]*)?$/, options.ext); + } + // Generate destination filename. + var dest = options.rename(destBase, destPath, options); + // Prepend cwd to src path if necessary. + if (options.cwd) { src = path.join(options.cwd, src); } + // Normalize filepaths to be unix-style. + dest = dest.replace(pathSeparatorRe, '/'); + src = src.replace(pathSeparatorRe, '/'); + // Map correct src path to dest path. 
+ if (fileByDest[dest]) { + // If dest already exists, push this src onto that dest's src array. + fileByDest[dest].src.push(src); + } else { + // Otherwise create a new src-dest file mapping object. + files.push({ + src: [src], + dest: dest, + }); + // And store a reference for later use. + fileByDest[dest] = files[files.length - 1]; + } + }); + return files; +}; + +// reusing bits of grunt's multi-task source normalization +file.normalizeFilesArray = function(data) { + var files = []; + + data.forEach(function(obj) { + var prop; + if ('src' in obj || 'dest' in obj) { + files.push(obj); + } + }); + + if (files.length === 0) { + return []; + } + + files = _(files).chain().forEach(function(obj) { + if (!('src' in obj) || !obj.src) { return; } + // Normalize .src properties to flattened array. + if (Array.isArray(obj.src)) { + obj.src = flatten(obj.src); + } else { + obj.src = [obj.src]; + } + }).map(function(obj) { + // Build options object, removing unwanted properties. + var expandOptions = Object.assign({}, obj); + delete expandOptions.src; + delete expandOptions.dest; + + // Expand file mappings. + if (obj.expand) { + return file.expandMapping(obj.src, obj.dest, expandOptions).map(function(mapObj) { + // Copy obj properties to result. + var result = Object.assign({}, obj); + // Make a clone of the orig obj available. + result.orig = Object.assign({}, obj); + // Set .src and .dest, processing both as templates. + result.src = mapObj.src; + result.dest = mapObj.dest; + // Remove unwanted properties. + ['expand', 'cwd', 'flatten', 'rename', 'ext'].forEach(function(prop) { + delete result[prop]; + }); + return result; + }); + } + + // Copy obj properties to result, adding an .orig property. + var result = Object.assign({}, obj); + // Make a clone of the orig obj available. + result.orig = Object.assign({}, obj); + + if ('src' in result) { + // Expose an expand-on-demand getter method as .src. 
+ Object.defineProperty(result, 'src', { + enumerable: true, + get: function fn() { + var src; + if (!('result' in fn)) { + src = obj.src; + // If src is an array, flatten it. Otherwise, make it into an array. + src = Array.isArray(src) ? flatten(src) : [src]; + // Expand src files, memoizing result. + fn.result = file.expand(expandOptions, src); + } + return fn.result; + } + }); + } + + if ('dest' in result) { + result.dest = obj.dest; + } + + return result; + }).flatten().value(); + + return files; +}; diff --git a/node_modules/archiver-utils/index.js b/node_modules/archiver-utils/index.js index a825f5b19..33a316a87 100644 --- a/node_modules/archiver-utils/index.js +++ b/node_modules/archiver-utils/index.js @@ -1,156 +1,155 @@ -/** - * archiver-utils - * - * Copyright (c) 2015 Chris Talkington. - * Licensed under the MIT license. - * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE - */ -var fs = require('graceful-fs'); -var path = require('path'); -var nutil = require('util'); -var lazystream = require('lazystream'); -var normalizePath = require('normalize-path'); -var defaults = require('lodash.defaults'); - -var Stream = require('stream').Stream; -var PassThrough = require('readable-stream').PassThrough; - -var utils = module.exports = {}; -utils.file = require('./file.js'); - -function assertPath(path) { - if (typeof path !== 'string') { - throw new TypeError('Path must be a string. 
Received ' + nutils.inspect(path)); - } -} - -utils.collectStream = function(source, callback) { - var collection = []; - var size = 0; - - source.on('error', callback); - - source.on('data', function(chunk) { - collection.push(chunk); - size += chunk.length; - }); - - source.on('end', function() { - var buf = new Buffer(size); - var offset = 0; - - collection.forEach(function(data) { - data.copy(buf, offset); - offset += data.length; - }); - - callback(null, buf); - }); -}; - -utils.dateify = function(dateish) { - dateish = dateish || new Date(); - - if (dateish instanceof Date) { - dateish = dateish; - } else if (typeof dateish === 'string') { - dateish = new Date(dateish); - } else { - dateish = new Date(); - } - - return dateish; -}; - -// this is slightly different from lodash version -utils.defaults = function(object, source, guard) { - var args = arguments; - args[0] = args[0] || {}; - - return defaults(...args); -}; - -utils.isStream = function(source) { - return source instanceof Stream; -}; - -utils.lazyReadStream = function(filepath) { - return new lazystream.Readable(function() { - return fs.createReadStream(filepath); - }); -}; - -utils.normalizeInputSource = function(source) { - if (source === null) { - return new Buffer(0); - } else if (typeof source === 'string') { - return new Buffer(source); - } else if (utils.isStream(source) && !source._readableState) { - var normalized = new PassThrough(); - source.pipe(normalized); - - return normalized; - } - - return source; -}; - -utils.sanitizePath = function(filepath) { - return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, ''); -}; - -utils.trailingSlashIt = function(str) { - return str.slice(-1) !== '/' ? 
str + '/' : str; -}; - -utils.unixifyPath = function(filepath) { - return normalizePath(filepath, false).replace(/^\w+:/, ''); -}; - -utils.walkdir = function(dirpath, base, callback) { - var results = []; - - if (typeof base === 'function') { - callback = base; - base = dirpath; - } - - fs.readdir(dirpath, function(err, list) { - var i = 0; - var file; - var filepath; - - if (err) { - return callback(err); - } - - (function next() { - file = list[i++]; - - if (!file) { - return callback(null, results); - } - - filepath = path.join(dirpath, file); - - fs.stat(filepath, function(err, stats) { - results.push({ - path: filepath, - relative: path.relative(base, filepath).replace(/\\/g, '/'), - stats: stats - }); - - if (stats && stats.isDirectory()) { - utils.walkdir(filepath, base, function(err, res) { - res.forEach(function(dirEntry) { - results.push(dirEntry); - }); - next(); - }); - } else { - next(); - } - }); - })(); - }); -}; +/** + * archiver-utils + * + * Copyright (c) 2015 Chris Talkington. + * Licensed under the MIT license. 
+ * https://github.com/archiverjs/archiver-utils/blob/master/LICENSE + */ +var fs = require('graceful-fs'); +var path = require('path'); +var isStream = require('is-stream'); +var lazystream = require('lazystream'); +var normalizePath = require('normalize-path'); +var defaults = require('lodash/defaults'); + +var Stream = require('stream').Stream; +var PassThrough = require('readable-stream').PassThrough; + +var utils = module.exports = {}; +utils.file = require('./file.js'); + +utils.collectStream = function(source, callback) { + var collection = []; + var size = 0; + + source.on('error', callback); + + source.on('data', function(chunk) { + collection.push(chunk); + size += chunk.length; + }); + + source.on('end', function() { + var buf = Buffer.alloc(size); + var offset = 0; + + collection.forEach(function(data) { + data.copy(buf, offset); + offset += data.length; + }); + + callback(null, buf); + }); +}; + +utils.dateify = function(dateish) { + dateish = dateish || new Date(); + + if (dateish instanceof Date) { + dateish = dateish; + } else if (typeof dateish === 'string') { + dateish = new Date(dateish); + } else { + dateish = new Date(); + } + + return dateish; +}; + +// this is slightly different from lodash version +utils.defaults = function(object, source, guard) { + var args = arguments; + args[0] = args[0] || {}; + + return defaults(...args); +}; + +utils.isStream = function(source) { + return isStream(source); +}; + +utils.lazyReadStream = function(filepath) { + return new lazystream.Readable(function() { + return fs.createReadStream(filepath); + }); +}; + +utils.normalizeInputSource = function(source) { + if (source === null) { + return Buffer.alloc(0); + } else if (typeof source === 'string') { + return Buffer.from(source); + } else if (utils.isStream(source)) { + // Always pipe through a PassThrough stream to guarantee pausing the stream if it's already flowing, + // since it will only be processed in a (distant) future iteration of the event loop, and 
will lose + // data if already flowing now. + return source.pipe(new PassThrough()); + } + + return source; +}; + +utils.sanitizePath = function(filepath) { + return normalizePath(filepath, false).replace(/^\w+:/, '').replace(/^(\.\.\/|\/)+/, ''); +}; + +utils.trailingSlashIt = function(str) { + return str.slice(-1) !== '/' ? str + '/' : str; +}; + +utils.unixifyPath = function(filepath) { + return normalizePath(filepath, false).replace(/^\w+:/, ''); +}; + +utils.walkdir = function(dirpath, base, callback) { + var results = []; + + if (typeof base === 'function') { + callback = base; + base = dirpath; + } + + fs.readdir(dirpath, function(err, list) { + var i = 0; + var file; + var filepath; + + if (err) { + return callback(err); + } + + (function next() { + file = list[i++]; + + if (!file) { + return callback(null, results); + } + + filepath = path.join(dirpath, file); + + fs.stat(filepath, function(err, stats) { + results.push({ + path: filepath, + relative: path.relative(base, filepath).replace(/\\/g, '/'), + stats: stats + }); + + if (stats && stats.isDirectory()) { + utils.walkdir(filepath, base, function(err, res) { + if(err){ + return callback(err); + } + + res.forEach(function(dirEntry) { + results.push(dirEntry); + }); + + next(); + }); + } else { + next(); + } + }); + })(); + }); +}; diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/glob.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/glob.js new file mode 100644 index 000000000..e1339bbbc --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/glob.js @@ -0,0 +1,247 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Glob = void 0; +const minimatch_1 = require("minimatch"); +const node_url_1 = require("node:url"); +const path_scurry_1 = require("path-scurry"); +const pattern_js_1 = require("./pattern.js"); +const walker_js_1 = require("./walker.js"); +// if no process global, just call it linux. 
+// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? 
p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin + : opts.platform ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new pattern_js_1.Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. 
It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. 
+ */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/has-magic.js new file mode 100644 index 000000000..0918bd57e --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/has-magic.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hasMagic = void 0; +const minimatch_1 = require("minimatch"); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. 
+ */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/ignore.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/ignore.js new file mode 100644 index 000000000..5f1fde068 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/ignore.js @@ -0,0 +1,119 @@ +"use strict"; +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Ignore = void 0; +const minimatch_1 = require("minimatch"); +const pattern_js_1 = require("./pattern.js"); +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? 
+ process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + const mm = new minimatch_1.Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' 
&& globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform); + const m = new minimatch_1.Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +exports.Ignore = Ignore; +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/index.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/index.js new file mode 100644 index 000000000..31da9dfd5 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/index.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.globIterate = exports.globIterateSync = exports.globSync = exports.globStream = exports.globStreamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = 
void 0; +const minimatch_1 = require("minimatch"); +const glob_js_1 = require("./glob.js"); +const has_magic_js_1 = require("./has-magic.js"); +var minimatch_2 = require("minimatch"); +Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); +var glob_js_2 = require("./glob.js"); +Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); +var has_magic_js_2 = require("./has-magic.js"); +Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); +var ignore_js_1 = require("./ignore.js"); +Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } }); +function globStreamSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).streamSync(); +} +exports.globStreamSync = globStreamSync; +function globStream(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).stream(); +} +exports.globStream = globStream; +function globSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walkSync(); +} +exports.globSync = globSync; +async function glob_(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walk(); +} +function globIterateSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterateSync(); +} +exports.globIterateSync = globIterateSync; +function globIterate(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterate(); +} +exports.globIterate = globIterate; +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +exports.streamSync = globStreamSync; +exports.stream = Object.assign(globStream, { sync: globStreamSync }); +exports.iterateSync = globIterateSync; +exports.iterate = Object.assign(globIterate, { + sync: 
globIterateSync, +}); +exports.sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/package.json b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/package.json new file mode 100644 index 000000000..5bbefffba --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/pattern.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/pattern.js new file mode 100644 index 000000000..f0de35fb5 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/pattern.js @@ -0,0 +1,219 @@ +"use strict"; +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const minimatch_1 = require("minimatch"); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + 
} + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; + } + 
/** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? + this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? + this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' 
+ // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? + this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/processor.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/processor.js new file mode 100644 index 000000000..ee3bb4397 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/processor.js @@ -0,0 +1,301 @@ +"use strict"; +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = require("minimatch"); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + 
hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. 
+ */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? + this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' 
|| p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, 
pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? 
+ if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/commonjs/walker.js b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/walker.js new file mode 100644 index 000000000..cb15946d9 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/commonjs/walker.js @@ -0,0 +1,387 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +const minipass_1 = require("minipass"); +const ignore_js_1 = require("./ignore.js"); +const processor_js_1 = require("./processor.js"); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? 
[], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? 
await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' 
+ this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); 
+ this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/bin.d.mts b/node_modules/archiver-utils/node_modules/glob/dist/esm/bin.d.mts new file mode 100644 index 000000000..77298e477 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/bin.d.mts @@ -0,0 +1,3 @@ +#!/usr/bin/env node +export {}; +//# sourceMappingURL=bin.d.mts.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/bin.mjs b/node_modules/archiver-utils/node_modules/glob/dist/esm/bin.mjs new file mode 100755 index 000000000..5c7bf1e92 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/bin.mjs @@ -0,0 +1,270 @@ +#!/usr/bin/env node +import { foregroundChild } from 'foreground-child'; +import { existsSync } from 'fs'; +import { jack } from 'jackspeak'; +import { loadPackageJson } from 'package-json-from-dist'; +import { join } from 'path'; +import { globStream } from './index.js'; +const { version } = loadPackageJson(import.meta.url, '../package.json'); +const j = jack({ + usage: 'glob [options] [ [ ...]]', +}) + .description(` + Glob v${version} + + Expand the positional glob 
expression arguments into any matching file + system paths found. + `) + .opt({ + cmd: { + short: 'c', + hint: 'command', + description: `Run the command provided, passing the glob expression + matches as arguments.`, + }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, +}) + .flag({ + all: { + short: 'A', + description: `By default, the glob cli command will not expand any + arguments that are an exact match to a file on disk. + + This prevents double-expanding, in case the shell expands + an argument whose filename is a glob expression. + + For example, if 'app/*.ts' would match 'app/[id].ts', then + on Windows powershell or cmd.exe, 'glob app/*.ts' will + expand to 'app/[id].ts', as expected. However, in posix + shells such as bash or zsh, the shell will first expand + 'app/*.ts' to a list of filenames. Then glob will look + for a file matching 'app/[id].ts' (ie, 'app/i.ts' or + 'app/d.ts'), which is unexpected. + + Setting '--all' prevents this behavior, causing glob + to treat ALL patterns as glob expressions to be expanded, + even if they are an exact match to a file on disk. + + When setting this option, be sure to enquote arguments + so that the shell will not expand them prior to passing + them to the glob command process. + `, + }, + absolute: { + short: 'a', + description: 'Expand to absolute paths', + }, + 'dot-relative': { + short: 'd', + description: `Prepend './' on relative matches`, + }, + mark: { + short: 'm', + description: `Append a / on any directories matched`, + }, + posix: { + short: 'x', + description: `Always resolve to posix style paths, using '/' as the + directory separator, even on Windows. Drive letter + absolute matches on Windows will be expanded to their + full resolved UNC maths, eg instead of 'C:\\foo\\bar', + it will expand to '//?/C:/foo/bar'. 
+ `, + }, + follow: { + short: 'f', + description: `Follow symlinked directories when expanding '**'`, + }, + realpath: { + short: 'R', + description: `Call 'fs.realpath' on all of the results. In the case + of an entry that cannot be resolved, the entry is + omitted. This incurs a slight performance penalty, of + course, because of the added system calls.`, + }, + stat: { + short: 's', + description: `Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match.`, + }, + 'match-base': { + short: 'b', + description: `Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + `, + }, + dot: { + description: `Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + `, + }, + nobrace: { + description: 'Do not expand {...} patterns', + }, + nocase: { + description: `Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on + all others. + + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + `, + }, + nodir: { + description: `Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + `, + }, + noext: { + description: `Do not expand extglob patterns, such as '+(a|b)'`, + }, + noglobstar: { + description: `Do not expand '**' against multiple path portions. + Ie, treat it as a normal '*' instead.`, + }, + 'windows-path-no-escape': { + description: `Use '\\' as a path separator *only*, and *never* as an + escape character. 
If set, all '\\' characters are + replaced with '/' in the pattern.`, + }, +}) + .num({ + 'max-depth': { + short: 'D', + description: `Maximum depth to traverse from the current + working directory`, + }, +}) + .opt({ + cwd: { + short: 'C', + description: 'Current working directory to execute/match in', + default: process.cwd(), + }, + root: { + short: 'r', + description: `A string path resolved against the 'cwd', which is + used as the starting point for absolute patterns that + start with '/' (but not drive letters or UNC paths + on Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root directory + on the filesystem, and any non-absolute patterns will + still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + `, + }, + platform: { + description: `Defaults to the value of 'process.platform' if + available, or 'linux' if not. 
Setting --platform=win32 + on non-Windows systems may cause strange behavior!`, + validOptions: [ + 'aix', + 'android', + 'darwin', + 'freebsd', + 'haiku', + 'linux', + 'openbsd', + 'sunos', + 'win32', + 'cygwin', + 'netbsd', + ], + }, +}) + .optList({ + ignore: { + short: 'i', + description: `Glob patterns to ignore`, + }, +}) + .flag({ + debug: { + short: 'v', + description: `Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files.`, + }, +}) + .flag({ + help: { + short: 'h', + description: 'Show this usage information', + }, +}); +try { + const { positionals, values } = j.parse(); + if (values.help) { + console.log(j.usage()); + process.exit(0); + } + if (positionals.length === 0 && !values.default) + throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); + const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p)); + const matches = values.all ? + [] + : positionals.filter(p => existsSync(p)).map(p => join(p)); + const stream = globStream(patterns, { + absolute: values.absolute, + cwd: values.cwd, + dot: values.dot, + dotRelative: values['dot-relative'], + follow: values.follow, + ignore: values.ignore, + mark: values.mark, + matchBase: values['match-base'], + maxDepth: values['max-depth'], + nobrace: values.nobrace, + nocase: values.nocase, + nodir: values.nodir, + noext: values.noext, + noglobstar: values.noglobstar, + platform: values.platform, + realpath: values.realpath, + root: values.root, + stat: values.stat, + debug: values.debug, + posix: values.posix, + }); + const cmd = values.cmd; + if (!cmd) { + matches.forEach(m => console.log(m)); + stream.on('data', f => console.log(f)); + } + else { + stream.on('data', f => matches.push(f)); + stream.on('end', () => foregroundChild(cmd, matches, { shell: true })); + } +} +catch (e) { + console.error(j.usage()); + console.error(e instanceof Error ? 
e.message : String(e)); + process.exit(1); +} +//# sourceMappingURL=bin.mjs.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/glob.js b/node_modules/archiver-utils/node_modules/glob/dist/esm/glob.js new file mode 100644 index 000000000..c9ff3b003 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/glob.js @@ -0,0 +1,243 @@ +import { Minimatch } from 'minimatch'; +import { fileURLToPath } from 'node:url'; +import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobStream, GlobWalker } from './walker.js'; +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +export class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = fileURLToPath(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? 
PathScurryWin32 + : opts.platform === 'darwin' ? PathScurryDarwin + : opts.platform ? PathScurryPosix + : PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? 
+ this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. 
+ */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/has-magic.js b/node_modules/archiver-utils/node_modules/glob/dist/esm/has-magic.js new file mode 100644 index 000000000..ba2321ab8 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/has-magic.js @@ -0,0 +1,23 @@ +import { Minimatch } from 'minimatch'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +export const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/ignore.js b/node_modules/archiver-utils/node_modules/glob/dist/esm/ignore.js new file mode 100644 index 000000000..539c4a4fd --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/ignore.js @@ -0,0 +1,115 @@ +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. 
+// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +import { Minimatch } from 'minimatch'; +import { Pattern } from './pattern.js'; +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +export class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. 
+ const mm = new Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' && globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new Pattern(parsed, globParts, 0, this.platform); + const m = new Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/index.js b/node_modules/archiver-utils/node_modules/glob/dist/esm/index.js new file mode 100644 index 000000000..e15c1f9c4 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/index.js @@ -0,0 +1,55 @@ +import { escape, unescape } from 'minimatch'; +import { Glob } from './glob.js'; 
+import { hasMagic } from './has-magic.js'; +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export { Ignore } from './ignore.js'; +export function globStreamSync(pattern, options = {}) { + return new Glob(pattern, options).streamSync(); +} +export function globStream(pattern, options = {}) { + return new Glob(pattern, options).stream(); +} +export function globSync(pattern, options = {}) { + return new Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new Glob(pattern, options).walk(); +} +export function globIterateSync(pattern, options = {}) { + return new Glob(pattern, options).iterateSync(); +} +export function globIterate(pattern, options = {}) { + return new Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +export const streamSync = globStreamSync; +export const stream = Object.assign(globStream, { sync: globStreamSync }); +export const iterateSync = globIterateSync; +export const iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +export const sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +export const glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync, + globStream, + stream, + globStreamSync, + streamSync, + globIterate, + iterate, + globIterateSync, + iterateSync, + Glob, + hasMagic, + escape, + unescape, +}); +glob.glob = glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/package.json b/node_modules/archiver-utils/node_modules/glob/dist/esm/package.json new file mode 100644 index 000000000..3dbc1ca59 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/pattern.js 
b/node_modules/archiver-utils/node_modules/glob/dist/esm/pattern.js new file mode 100644 index 000000000..b41defa10 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/pattern.js @@ -0,0 +1,215 @@ +// this is just a very light wrapper around 2 arrays with an offset index +import { GLOBSTAR } from 'minimatch'; +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. 
+ if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? 
+ this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? + this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? 
+ this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/processor.js b/node_modules/archiver-utils/node_modules/glob/dist/esm/processor.js new file mode 100644 index 000000000..f874892ff --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/processor.js @@ -0,0 +1,294 @@ +// synchronous utility for filtering entries and calculating subwalks +import { GLOBSTAR } from 'minimatch'; +/** + * A cache of which patterns have been processed for a given Path + */ +export class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, 
+ * and whether their absolute or relative path should be returned. + */ +export class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? 
hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? + this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. 
+ if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not 
a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/dist/esm/walker.js b/node_modules/archiver-utils/node_modules/glob/dist/esm/walker.js new file mode 100644 index 000000000..3d68196c4 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/dist/esm/walker.js @@ -0,0 +1,381 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. 
+ * + * @module + */ +import { Minipass } from 'minipass'; +import { Ignore } from './ignore.js'; +import { Processor } from './processor.js'; +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts) + : Array.isArray(ignore) ? new Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +export class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? [], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore 
stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? 
absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +export class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +export class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new Minipass({ + signal: 
this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/glob/package.json b/node_modules/archiver-utils/node_modules/glob/package.json new file mode 100644 index 000000000..4838947df --- /dev/null +++ b/node_modules/archiver-utils/node_modules/glob/package.json @@ -0,0 +1,98 @@ +{ + "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", + "name": "glob", + "description": "the most correct and second fastest glob implementation in JavaScript", + "version": "10.4.2", + "type": "module", + "tshy": { + "main": true, + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "bin": "./dist/esm/bin.mjs", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . 
--log-level warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts", + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", + "prebench": "npm run prepare", + "bench": "bash benchmark.sh", + "preprof": "npm run prepare", + "prof": "bash prof.sh", + "benchclean": "node benchclean.cjs" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "devDependencies": { + "@types/node": "^20.11.30", + "memfs": "^3.4.13", + "mkdirp": "^3.0.1", + "prettier": "^3.2.5", + "rimraf": "^5.0.7", + "sync-content": "^1.0.2", + "tap": "^19.0.0", + "tshy": "^1.14.0", + "typedoc": "^0.25.12" + }, + "tap": { + "before": "test/00-setup.ts" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + } +} diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js new file mode 100644 index 000000000..5fc86bbd0 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > 
MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/ast.js new file mode 100644 index 000000000..7b2109625 --- /dev/null +++ b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/ast.js @@ -0,0 +1,592 @@ +"use strict"; +// parse a single path portion +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AST = void 0; +const brace_expressions_js_1 = require("./brace-expressions.js"); +const unescape_js_1 = require("./unescape.js"); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. 
+const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' && !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, 
appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? 
this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + 
else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. + const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 
'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + get options() { + return this.#options; + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. 
+ // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. + // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? 
AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? 
'(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' + : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? 
`)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? '\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/brace-expressions.js new file mode 100644 index 000000000..0e13eefc4 --- /dev/null +++ 
b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/brace-expressions.js @@ -0,0 +1,152 @@ +"use strict"; +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular expression source, and boolean indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. 
+const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' || c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. 
+ // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? 
sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/escape.js new file mode 100644 index 000000000..02a4f8a8e --- /dev/null +++ b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/escape.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. + * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/index.js b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/index.js new file mode 100644 index 000000000..d05f8b47f --- /dev/null +++ b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/index.js @@ -0,0 +1,1016 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = __importDefault(require("brace-expansion")); +const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); +const ast_js_1 = require("./ast.js"); +const escape_js_1 = require("./escape.js"); +const unescape_js_1 = require("./unescape.js"); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. +const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' 
&& f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? (typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. 
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// 
a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.default)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. 
+ this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' 
|| !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. 
parts + globParts = this.levelOneOptimize(globParts); + } + else { + // just collapse multiple ** portions into one + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
// -> 
/
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // 
/

/../ ->

/
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // 
 is 1 or more portions
+    //  is 1 or more portions
+    // 

is any portion other than ., .., '', or ** + // is . or '' + // + // **/.. is *brutal* for filesystem walking performance, because + // it effectively resets the recursive walk each time it occurs, + // and ** cannot be reduced out by a .. pattern part like a regexp + // or most strings (other than .., ., and '') can be. + // + //

/**/../

/

/ -> {

/../

/

/,

/**/

/

/} + //

// -> 
/
+    // 
/

/../ ->

/
+    // **/**/ -> **/
+    //
+    // **/*/ -> */**/ <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // 
/**/../

/

/ -> {

/../

/

/,

/**/

/

/} + for (let parts of globParts) { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let gss = gs; + while (parts[gss + 1] === '**') { + //

/**/**/ -> 
/**/
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // 
// -> 
/
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // 
/

/../ ->

/
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {
/*/,
/

/} ->

/*/
+    // {
/,
/} -> 
/
+    // {
/**/,
/} -> 
/**/
+    //
+    // {
/**/,
/**/

/} ->

/**/
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // dont' need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/package.json b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 000000000..5bbefffba
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 000000000..47c36bcee
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 000000000..7b534fc30
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/ast.js b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 000000000..2d2bced65
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everthing that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something,but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 000000000..c629d6ae8
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { : [, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/escape.js b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 000000000..16f7c8c7b
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/index.js b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 000000000..ff6319369
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1000 @@
+import expand from 'brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (!matched)
+                    continue;
+                globParts[i] = matched;
+                globParts[j] = [];
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // dont' need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/package.json b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 000000000..3dbc1ca59
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/archiver-utils/node_modules/minimatch/dist/esm/unescape.js b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 000000000..0faf9a2b7
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/minimatch/package.json b/node_modules/archiver-utils/node_modules/minimatch/package.json
new file mode 100644
index 000000000..2c82c0398
--- /dev/null
+++ b/node_modules/archiver-utils/node_modules/minimatch/package.json
@@ -0,0 +1,82 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "9.0.4",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^18.7.2",
+    "ts-node": "^10.9.1",
+    "tshy": "^1.12.0",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "type": "module"
+}
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/duplex-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/duplex-browser.js
deleted file mode 100644
index f8b2db83d..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/duplex-browser.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./lib/_stream_duplex.js');
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/duplex.js b/node_modules/archiver-utils/node_modules/readable-stream/duplex.js
deleted file mode 100644
index 46924cbfd..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/duplex.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./readable').Duplex
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js
deleted file mode 100644
index 57003c32d..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_duplex.js
+++ /dev/null
@@ -1,131 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// a duplex stream is just a stream that is both readable and writable.
-// Since JS doesn't have multiple prototypal inheritance, this class
-// prototypally inherits from Readable, and then parasitically from
-// Writable.
-
-'use strict';
-
-/**/
-
-var pna = require('process-nextick-args');
-/**/
-
-/**/
-var objectKeys = Object.keys || function (obj) {
-  var keys = [];
-  for (var key in obj) {
-    keys.push(key);
-  }return keys;
-};
-/**/
-
-module.exports = Duplex;
-
-/**/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/**/
-
-var Readable = require('./_stream_readable');
-var Writable = require('./_stream_writable');
-
-util.inherits(Duplex, Readable);
-
-{
-  // avoid scope creep, the keys array can then be collected
-  var keys = objectKeys(Writable.prototype);
-  for (var v = 0; v < keys.length; v++) {
-    var method = keys[v];
-    if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method];
-  }
-}
-
-function Duplex(options) {
-  if (!(this instanceof Duplex)) return new Duplex(options);
-
-  Readable.call(this, options);
-  Writable.call(this, options);
-
-  if (options && options.readable === false) this.readable = false;
-
-  if (options && options.writable === false) this.writable = false;
-
-  this.allowHalfOpen = true;
-  if (options && options.allowHalfOpen === false) this.allowHalfOpen = false;
-
-  this.once('end', onend);
-}
-
-Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', {
-  // making it explicit this property is not enumerable
-  // because otherwise some prototype manipulation in
-  // userland will fail
-  enumerable: false,
-  get: function () {
-    return this._writableState.highWaterMark;
-  }
-});
-
-// the no-half-open enforcer
-function onend() {
-  // if we allow half-open state, or if the writable side ended,
-  // then we're ok.
-  if (this.allowHalfOpen || this._writableState.ended) return;
-
-  // no more data can be written.
-  // But allow more writes to happen in this tick.
-  pna.nextTick(onEndNT, this);
-}
-
-function onEndNT(self) {
-  self.end();
-}
-
-Object.defineProperty(Duplex.prototype, 'destroyed', {
-  get: function () {
-    if (this._readableState === undefined || this._writableState === undefined) {
-      return false;
-    }
-    return this._readableState.destroyed && this._writableState.destroyed;
-  },
-  set: function (value) {
-    // we ignore the value if the stream
-    // has not been initialized yet
-    if (this._readableState === undefined || this._writableState === undefined) {
-      return;
-    }
-
-    // backward compatibility, the user is explicitly
-    // managing destroyed
-    this._readableState.destroyed = value;
-    this._writableState.destroyed = value;
-  }
-});
-
-Duplex.prototype._destroy = function (err, cb) {
-  this.push(null);
-  this.end();
-
-  pna.nextTick(cb, err);
-};
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js
deleted file mode 100644
index 612edb4d8..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_passthrough.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// a passthrough stream.
-// basically just the most minimal sort of Transform stream.
-// Every written chunk gets output as-is.
-
-'use strict';
-
-module.exports = PassThrough;
-
-var Transform = require('./_stream_transform');
-
-/**/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/**/
-
-util.inherits(PassThrough, Transform);
-
-function PassThrough(options) {
-  if (!(this instanceof PassThrough)) return new PassThrough(options);
-
-  Transform.call(this, options);
-}
-
-PassThrough.prototype._transform = function (chunk, encoding, cb) {
-  cb(null, chunk);
-};
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js
deleted file mode 100644
index 3af95cb2d..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_readable.js
+++ /dev/null
@@ -1,1019 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-'use strict';
-
-/**/
-
-var pna = require('process-nextick-args');
-/**/
-
-module.exports = Readable;
-
-/**/
-var isArray = require('isarray');
-/**/
-
-/**/
-var Duplex;
-/**/
-
-Readable.ReadableState = ReadableState;
-
-/**/
-var EE = require('events').EventEmitter;
-
-var EElistenerCount = function (emitter, type) {
-  return emitter.listeners(type).length;
-};
-/**/
-
-/**/
-var Stream = require('./internal/streams/stream');
-/**/
-
-/**/
-
-var Buffer = require('safe-buffer').Buffer;
-var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
-function _uint8ArrayToBuffer(chunk) {
-  return Buffer.from(chunk);
-}
-function _isUint8Array(obj) {
-  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-
-/**/
-
-/**/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/**/
-
-/**/
-var debugUtil = require('util');
-var debug = void 0;
-if (debugUtil && debugUtil.debuglog) {
-  debug = debugUtil.debuglog('stream');
-} else {
-  debug = function () {};
-}
-/**/
-
-var BufferList = require('./internal/streams/BufferList');
-var destroyImpl = require('./internal/streams/destroy');
-var StringDecoder;
-
-util.inherits(Readable, Stream);
-
-var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume'];
-
-function prependListener(emitter, event, fn) {
-  // Sadly this is not cacheable as some libraries bundle their own
-  // event emitter implementation with them.
-  if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn);
-
-  // This is a hack to make sure that our error handler is attached before any
-  // userland ones.  NEVER DO THIS. This is here only because this code needs
-  // to continue to work with older versions of Node.js that do not include
-  // the prependListener() method. The goal is to eventually remove this hack.
-  if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]];
-}
-
-function ReadableState(options, stream) {
-  Duplex = Duplex || require('./_stream_duplex');
-
-  options = options || {};
-
-  // Duplex streams are both readable and writable, but share
-  // the same options object.
-  // However, some cases require setting options to different
-  // values for the readable and the writable sides of the duplex stream.
-  // These options can be provided separately as readableXXX and writableXXX.
-  var isDuplex = stream instanceof Duplex;
-
-  // object stream flag. Used to make read(n) ignore n and to
-  // make all the buffer merging and length checks go away
-  this.objectMode = !!options.objectMode;
-
-  if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode;
-
-  // the point at which it stops calling _read() to fill the buffer
-  // Note: 0 is a valid value, means "don't call _read preemptively ever"
-  var hwm = options.highWaterMark;
-  var readableHwm = options.readableHighWaterMark;
-  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
-
-  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm;
-
-  // cast to ints.
-  this.highWaterMark = Math.floor(this.highWaterMark);
-
-  // A linked list is used to store data chunks instead of an array because the
-  // linked list can remove elements from the beginning faster than
-  // array.shift()
-  this.buffer = new BufferList();
-  this.length = 0;
-  this.pipes = null;
-  this.pipesCount = 0;
-  this.flowing = null;
-  this.ended = false;
-  this.endEmitted = false;
-  this.reading = false;
-
-  // a flag to be able to tell if the event 'readable'/'data' is emitted
-  // immediately, or on a later tick.  We set this to true at first, because
-  // any actions that shouldn't happen until "later" should generally also
-  // not happen before the first read call.
-  this.sync = true;
-
-  // whenever we return null, then we set a flag to say
-  // that we're awaiting a 'readable' event emission.
-  this.needReadable = false;
-  this.emittedReadable = false;
-  this.readableListening = false;
-  this.resumeScheduled = false;
-
-  // has it been destroyed
-  this.destroyed = false;
-
-  // Crypto is kind of old and crusty.  Historically, its default string
-  // encoding is 'binary' so we have to make this configurable.
-  // Everything else in the universe uses 'utf8', though.
-  this.defaultEncoding = options.defaultEncoding || 'utf8';
-
-  // the number of writers that are awaiting a drain event in .pipe()s
-  this.awaitDrain = 0;
-
-  // if true, a maybeReadMore has been scheduled
-  this.readingMore = false;
-
-  this.decoder = null;
-  this.encoding = null;
-  if (options.encoding) {
-    if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
-    this.decoder = new StringDecoder(options.encoding);
-    this.encoding = options.encoding;
-  }
-}
-
-function Readable(options) {
-  Duplex = Duplex || require('./_stream_duplex');
-
-  if (!(this instanceof Readable)) return new Readable(options);
-
-  this._readableState = new ReadableState(options, this);
-
-  // legacy
-  this.readable = true;
-
-  if (options) {
-    if (typeof options.read === 'function') this._read = options.read;
-
-    if (typeof options.destroy === 'function') this._destroy = options.destroy;
-  }
-
-  Stream.call(this);
-}
-
-Object.defineProperty(Readable.prototype, 'destroyed', {
-  get: function () {
-    if (this._readableState === undefined) {
-      return false;
-    }
-    return this._readableState.destroyed;
-  },
-  set: function (value) {
-    // we ignore the value if the stream
-    // has not been initialized yet
-    if (!this._readableState) {
-      return;
-    }
-
-    // backward compatibility, the user is explicitly
-    // managing destroyed
-    this._readableState.destroyed = value;
-  }
-});
-
-Readable.prototype.destroy = destroyImpl.destroy;
-Readable.prototype._undestroy = destroyImpl.undestroy;
-Readable.prototype._destroy = function (err, cb) {
-  this.push(null);
-  cb(err);
-};
-
-// Manually shove something into the read() buffer.
-// This returns true if the highWaterMark has not been hit yet,
-// similar to how Writable.write() returns true if you should
-// write() some more.
-Readable.prototype.push = function (chunk, encoding) {
-  var state = this._readableState;
-  var skipChunkCheck;
-
-  if (!state.objectMode) {
-    if (typeof chunk === 'string') {
-      encoding = encoding || state.defaultEncoding;
-      if (encoding !== state.encoding) {
-        chunk = Buffer.from(chunk, encoding);
-        encoding = '';
-      }
-      skipChunkCheck = true;
-    }
-  } else {
-    skipChunkCheck = true;
-  }
-
-  return readableAddChunk(this, chunk, encoding, false, skipChunkCheck);
-};
-
-// Unshift should *always* be something directly out of read()
-Readable.prototype.unshift = function (chunk) {
-  return readableAddChunk(this, chunk, null, true, false);
-};
-
-function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) {
-  var state = stream._readableState;
-  if (chunk === null) {
-    state.reading = false;
-    onEofChunk(stream, state);
-  } else {
-    var er;
-    if (!skipChunkCheck) er = chunkInvalid(state, chunk);
-    if (er) {
-      stream.emit('error', er);
-    } else if (state.objectMode || chunk && chunk.length > 0) {
-      if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) {
-        chunk = _uint8ArrayToBuffer(chunk);
-      }
-
-      if (addToFront) {
-        if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true);
-      } else if (state.ended) {
-        stream.emit('error', new Error('stream.push() after EOF'));
-      } else {
-        state.reading = false;
-        if (state.decoder && !encoding) {
-          chunk = state.decoder.write(chunk);
-          if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state);
-        } else {
-          addChunk(stream, state, chunk, false);
-        }
-      }
-    } else if (!addToFront) {
-      state.reading = false;
-    }
-  }
-
-  return needMoreData(state);
-}
-
-function addChunk(stream, state, chunk, addToFront) {
-  if (state.flowing && state.length === 0 && !state.sync) {
-    stream.emit('data', chunk);
-    stream.read(0);
-  } else {
-    // update the buffer info.
-    state.length += state.objectMode ? 1 : chunk.length;
-    if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk);
-
-    if (state.needReadable) emitReadable(stream);
-  }
-  maybeReadMore(stream, state);
-}
-
-function chunkInvalid(state, chunk) {
-  var er;
-  if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
-    er = new TypeError('Invalid non-string/buffer chunk');
-  }
-  return er;
-}
-
-// if it's past the high water mark, we can push in some more.
-// Also, if we have no data yet, we can stand some
-// more bytes.  This is to work around cases where hwm=0,
-// such as the repl.  Also, if the push() triggered a
-// readable event, and the user called read(largeNumber) such that
-// needReadable was set, then we ought to push more, so that another
-// 'readable' event will be triggered.
-function needMoreData(state) {
-  return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0);
-}
-
-Readable.prototype.isPaused = function () {
-  return this._readableState.flowing === false;
-};
-
-// backwards compatibility.
-Readable.prototype.setEncoding = function (enc) {
-  if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder;
-  this._readableState.decoder = new StringDecoder(enc);
-  this._readableState.encoding = enc;
-  return this;
-};
-
-// Don't raise the hwm > 8MB
-var MAX_HWM = 0x800000;
-function computeNewHighWaterMark(n) {
-  if (n >= MAX_HWM) {
-    n = MAX_HWM;
-  } else {
-    // Get the next highest power of 2 to prevent increasing hwm excessively in
-    // tiny amounts
-    n--;
-    n |= n >>> 1;
-    n |= n >>> 2;
-    n |= n >>> 4;
-    n |= n >>> 8;
-    n |= n >>> 16;
-    n++;
-  }
-  return n;
-}
-
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function howMuchToRead(n, state) {
-  if (n <= 0 || state.length === 0 && state.ended) return 0;
-  if (state.objectMode) return 1;
-  if (n !== n) {
-    // Only flow one buffer at a time
-    if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length;
-  }
-  // If we're asking for more than the current hwm, then raise the hwm.
-  if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n);
-  if (n <= state.length) return n;
-  // Don't have enough
-  if (!state.ended) {
-    state.needReadable = true;
-    return 0;
-  }
-  return state.length;
-}
-
-// you can override either this method, or the async _read(n) below.
-Readable.prototype.read = function (n) {
-  debug('read', n);
-  n = parseInt(n, 10);
-  var state = this._readableState;
-  var nOrig = n;
-
-  if (n !== 0) state.emittedReadable = false;
-
-  // if we're doing read(0) to trigger a readable event, but we
-  // already have a bunch of data in the buffer, then just trigger
-  // the 'readable' event and move on.
-  if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) {
-    debug('read: emitReadable', state.length, state.ended);
-    if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this);
-    return null;
-  }
-
-  n = howMuchToRead(n, state);
-
-  // if we've ended, and we're now clear, then finish it up.
-  if (n === 0 && state.ended) {
-    if (state.length === 0) endReadable(this);
-    return null;
-  }
-
-  // All the actual chunk generation logic needs to be
-  // *below* the call to _read.  The reason is that in certain
-  // synthetic stream cases, such as passthrough streams, _read
-  // may be a completely synchronous operation which may change
-  // the state of the read buffer, providing enough data when
-  // before there was *not* enough.
-  //
-  // So, the steps are:
-  // 1. Figure out what the state of things will be after we do
-  // a read from the buffer.
-  //
-  // 2. If that resulting state will trigger a _read, then call _read.
-  // Note that this may be asynchronous, or synchronous.  Yes, it is
-  // deeply ugly to write APIs this way, but that still doesn't mean
-  // that the Readable class should behave improperly, as streams are
-  // designed to be sync/async agnostic.
-  // Take note if the _read call is sync or async (ie, if the read call
-  // has returned yet), so that we know whether or not it's safe to emit
-  // 'readable' etc.
-  //
-  // 3. Actually pull the requested chunks out of the buffer and return.
-
-  // if we need a readable event, then we need to do some reading.
-  var doRead = state.needReadable;
-  debug('need readable', doRead);
-
-  // if we currently have less than the highWaterMark, then also read some
-  if (state.length === 0 || state.length - n < state.highWaterMark) {
-    doRead = true;
-    debug('length less than watermark', doRead);
-  }
-
-  // however, if we've ended, then there's no point, and if we're already
-  // reading, then it's unnecessary.
-  if (state.ended || state.reading) {
-    doRead = false;
-    debug('reading or ended', doRead);
-  } else if (doRead) {
-    debug('do read');
-    state.reading = true;
-    state.sync = true;
-    // if the length is currently zero, then we *need* a readable event.
-    if (state.length === 0) state.needReadable = true;
-    // call internal read method
-    this._read(state.highWaterMark);
-    state.sync = false;
-    // If _read pushed data synchronously, then `reading` will be false,
-    // and we need to re-evaluate how much data we can return to the user.
-    if (!state.reading) n = howMuchToRead(nOrig, state);
-  }
-
-  var ret;
-  if (n > 0) ret = fromList(n, state);else ret = null;
-
-  if (ret === null) {
-    state.needReadable = true;
-    n = 0;
-  } else {
-    state.length -= n;
-  }
-
-  if (state.length === 0) {
-    // If we have nothing in the buffer, then we want to know
-    // as soon as we *do* get something into the buffer.
-    if (!state.ended) state.needReadable = true;
-
-    // If we tried to read() past the EOF, then emit end on the next tick.
-    if (nOrig !== n && state.ended) endReadable(this);
-  }
-
-  if (ret !== null) this.emit('data', ret);
-
-  return ret;
-};
-
-function onEofChunk(stream, state) {
-  if (state.ended) return;
-  if (state.decoder) {
-    var chunk = state.decoder.end();
-    if (chunk && chunk.length) {
-      state.buffer.push(chunk);
-      state.length += state.objectMode ? 1 : chunk.length;
-    }
-  }
-  state.ended = true;
-
-  // emit 'readable' now to make sure it gets picked up.
-  emitReadable(stream);
-}
-
-// Don't emit readable right away in sync mode, because this can trigger
-// another read() call => stack overflow.  This way, it might trigger
-// a nextTick recursion warning, but that's not so bad.
-function emitReadable(stream) {
-  var state = stream._readableState;
-  state.needReadable = false;
-  if (!state.emittedReadable) {
-    debug('emitReadable', state.flowing);
-    state.emittedReadable = true;
-    if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream);
-  }
-}
-
-function emitReadable_(stream) {
-  debug('emit readable');
-  stream.emit('readable');
-  flow(stream);
-}
-
-// at this point, the user has presumably seen the 'readable' event,
-// and called read() to consume some data.  that may have triggered
-// in turn another _read(n) call, in which case reading = true if
-// it's in progress.
-// However, if we're not ended, or reading, and the length < hwm,
-// then go ahead and try to read some more preemptively.
-function maybeReadMore(stream, state) {
-  if (!state.readingMore) {
-    state.readingMore = true;
-    pna.nextTick(maybeReadMore_, stream, state);
-  }
-}
-
-function maybeReadMore_(stream, state) {
-  var len = state.length;
-  while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) {
-    debug('maybeReadMore read 0');
-    stream.read(0);
-    if (len === state.length)
-      // didn't get any data, stop spinning.
-      break;else len = state.length;
-  }
-  state.readingMore = false;
-}
-
-// abstract method.  to be overridden in specific implementation classes.
-// call cb(er, data) where data is <= n in length.
-// for virtual (non-string, non-buffer) streams, "length" is somewhat
-// arbitrary, and perhaps not very meaningful.
-Readable.prototype._read = function (n) {
-  this.emit('error', new Error('_read() is not implemented'));
-};
-
-Readable.prototype.pipe = function (dest, pipeOpts) {
-  var src = this;
-  var state = this._readableState;
-
-  switch (state.pipesCount) {
-    case 0:
-      state.pipes = dest;
-      break;
-    case 1:
-      state.pipes = [state.pipes, dest];
-      break;
-    default:
-      state.pipes.push(dest);
-      break;
-  }
-  state.pipesCount += 1;
-  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);
-
-  var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr;
-
-  var endFn = doEnd ? onend : unpipe;
-  if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn);
-
-  dest.on('unpipe', onunpipe);
-  function onunpipe(readable, unpipeInfo) {
-    debug('onunpipe');
-    if (readable === src) {
-      if (unpipeInfo && unpipeInfo.hasUnpiped === false) {
-        unpipeInfo.hasUnpiped = true;
-        cleanup();
-      }
-    }
-  }
-
-  function onend() {
-    debug('onend');
-    dest.end();
-  }
-
-  // when the dest drains, it reduces the awaitDrain counter
-  // on the source.  This would be more elegant with a .once()
-  // handler in flow(), but adding and removing repeatedly is
-  // too slow.
-  var ondrain = pipeOnDrain(src);
-  dest.on('drain', ondrain);
-
-  var cleanedUp = false;
-  function cleanup() {
-    debug('cleanup');
-    // cleanup event handlers once the pipe is broken
-    dest.removeListener('close', onclose);
-    dest.removeListener('finish', onfinish);
-    dest.removeListener('drain', ondrain);
-    dest.removeListener('error', onerror);
-    dest.removeListener('unpipe', onunpipe);
-    src.removeListener('end', onend);
-    src.removeListener('end', unpipe);
-    src.removeListener('data', ondata);
-
-    cleanedUp = true;
-
-    // if the reader is waiting for a drain event from this
-    // specific writer, then it would cause it to never start
-    // flowing again.
-    // So, if this is awaiting a drain, then we just call it now.
-    // If we don't know, then assume that we are waiting for one.
-    if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain();
-  }
-
-  // If the user pushes more data while we're writing to dest then we'll end up
-  // in ondata again. However, we only want to increase awaitDrain once because
-  // dest will only emit one 'drain' event for the multiple writes.
-  // => Introduce a guard on increasing awaitDrain.
-  var increasedAwaitDrain = false;
-  src.on('data', ondata);
-  function ondata(chunk) {
-    debug('ondata');
-    increasedAwaitDrain = false;
-    var ret = dest.write(chunk);
-    if (false === ret && !increasedAwaitDrain) {
-      // If the user unpiped during `dest.write()`, it is possible
-      // to get stuck in a permanently paused state if that write
-      // also returned false.
-      // => Check whether `dest` is still a piping destination.
-      if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) {
-        debug('false write response, pause', state.awaitDrain);
-        state.awaitDrain++;
-        increasedAwaitDrain = true;
-      }
-      src.pause();
-    }
-  }
-
-  // if the dest has an error, then stop piping into it.
-  // however, don't suppress the throwing behavior for this.
-  function onerror(er) {
-    debug('onerror', er);
-    unpipe();
-    dest.removeListener('error', onerror);
-    if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er);
-  }
-
-  // Make sure our error handler is attached before userland ones.
-  prependListener(dest, 'error', onerror);
-
-  // Both close and finish should trigger unpipe, but only once.
-  function onclose() {
-    dest.removeListener('finish', onfinish);
-    unpipe();
-  }
-  dest.once('close', onclose);
-  function onfinish() {
-    debug('onfinish');
-    dest.removeListener('close', onclose);
-    unpipe();
-  }
-  dest.once('finish', onfinish);
-
-  function unpipe() {
-    debug('unpipe');
-    src.unpipe(dest);
-  }
-
-  // tell the dest that it's being piped to
-  dest.emit('pipe', src);
-
-  // start the flow if it hasn't been started already.
-  if (!state.flowing) {
-    debug('pipe resume');
-    src.resume();
-  }
-
-  return dest;
-};
-
-function pipeOnDrain(src) {
-  return function () {
-    var state = src._readableState;
-    debug('pipeOnDrain', state.awaitDrain);
-    if (state.awaitDrain) state.awaitDrain--;
-    if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) {
-      state.flowing = true;
-      flow(src);
-    }
-  };
-}
-
-Readable.prototype.unpipe = function (dest) {
-  var state = this._readableState;
-  var unpipeInfo = { hasUnpiped: false };
-
-  // if we're not piping anywhere, then do nothing.
-  if (state.pipesCount === 0) return this;
-
-  // just one destination.  most common case.
-  if (state.pipesCount === 1) {
-    // passed in one, but it's not the right one.
-    if (dest && dest !== state.pipes) return this;
-
-    if (!dest) dest = state.pipes;
-
-    // got a match.
-    state.pipes = null;
-    state.pipesCount = 0;
-    state.flowing = false;
-    if (dest) dest.emit('unpipe', this, unpipeInfo);
-    return this;
-  }
-
-  // slow case. multiple pipe destinations.
-
-  if (!dest) {
-    // remove all.
-    var dests = state.pipes;
-    var len = state.pipesCount;
-    state.pipes = null;
-    state.pipesCount = 0;
-    state.flowing = false;
-
-    for (var i = 0; i < len; i++) {
-      dests[i].emit('unpipe', this, { hasUnpiped: false });
-    }return this;
-  }
-
-  // try to find the right one.
-  var index = indexOf(state.pipes, dest);
-  if (index === -1) return this;
-
-  state.pipes.splice(index, 1);
-  state.pipesCount -= 1;
-  if (state.pipesCount === 1) state.pipes = state.pipes[0];
-
-  dest.emit('unpipe', this, unpipeInfo);
-
-  return this;
-};
-
-// set up data events if they are asked for
-// Ensure readable listeners eventually get something
-Readable.prototype.on = function (ev, fn) {
-  var res = Stream.prototype.on.call(this, ev, fn);
-
-  if (ev === 'data') {
-    // Start flowing on next tick if stream isn't explicitly paused
-    if (this._readableState.flowing !== false) this.resume();
-  } else if (ev === 'readable') {
-    var state = this._readableState;
-    if (!state.endEmitted && !state.readableListening) {
-      state.readableListening = state.needReadable = true;
-      state.emittedReadable = false;
-      if (!state.reading) {
-        pna.nextTick(nReadingNextTick, this);
-      } else if (state.length) {
-        emitReadable(this);
-      }
-    }
-  }
-
-  return res;
-};
-Readable.prototype.addListener = Readable.prototype.on;
-
-function nReadingNextTick(self) {
-  debug('readable nexttick read 0');
-  self.read(0);
-}
-
-// pause() and resume() are remnants of the legacy readable stream API
-// If the user uses them, then switch into old mode.
-Readable.prototype.resume = function () {
-  var state = this._readableState;
-  if (!state.flowing) {
-    debug('resume');
-    state.flowing = true;
-    resume(this, state);
-  }
-  return this;
-};
-
-function resume(stream, state) {
-  if (!state.resumeScheduled) {
-    state.resumeScheduled = true;
-    pna.nextTick(resume_, stream, state);
-  }
-}
-
-function resume_(stream, state) {
-  if (!state.reading) {
-    debug('resume read 0');
-    stream.read(0);
-  }
-
-  state.resumeScheduled = false;
-  state.awaitDrain = 0;
-  stream.emit('resume');
-  flow(stream);
-  if (state.flowing && !state.reading) stream.read(0);
-}
-
-Readable.prototype.pause = function () {
-  debug('call pause flowing=%j', this._readableState.flowing);
-  if (false !== this._readableState.flowing) {
-    debug('pause');
-    this._readableState.flowing = false;
-    this.emit('pause');
-  }
-  return this;
-};
-
-function flow(stream) {
-  var state = stream._readableState;
-  debug('flow', state.flowing);
-  while (state.flowing && stream.read() !== null) {}
-}
-
-// wrap an old-style stream as the async data source.
-// This is *not* part of the readable stream interface.
-// It is an ugly unfortunate mess of history.
-Readable.prototype.wrap = function (stream) {
-  var _this = this;
-
-  var state = this._readableState;
-  var paused = false;
-
-  stream.on('end', function () {
-    debug('wrapped end');
-    if (state.decoder && !state.ended) {
-      var chunk = state.decoder.end();
-      if (chunk && chunk.length) _this.push(chunk);
-    }
-
-    _this.push(null);
-  });
-
-  stream.on('data', function (chunk) {
-    debug('wrapped data');
-    if (state.decoder) chunk = state.decoder.write(chunk);
-
-    // don't skip over falsy values in objectMode
-    if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return;
-
-    var ret = _this.push(chunk);
-    if (!ret) {
-      paused = true;
-      stream.pause();
-    }
-  });
-
-  // proxy all the other methods.
-  // important when wrapping filters and duplexes.
-  for (var i in stream) {
-    if (this[i] === undefined && typeof stream[i] === 'function') {
-      this[i] = function (method) {
-        return function () {
-          return stream[method].apply(stream, arguments);
-        };
-      }(i);
-    }
-  }
-
-  // proxy certain important events.
-  for (var n = 0; n < kProxyEvents.length; n++) {
-    stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n]));
-  }
-
-  // when we try to consume some more bytes, simply unpause the
-  // underlying stream.
-  this._read = function (n) {
-    debug('wrapped _read', n);
-    if (paused) {
-      paused = false;
-      stream.resume();
-    }
-  };
-
-  return this;
-};
-
-Object.defineProperty(Readable.prototype, 'readableHighWaterMark', {
-  // making it explicit this property is not enumerable
-  // because otherwise some prototype manipulation in
-  // userland will fail
-  enumerable: false,
-  get: function () {
-    return this._readableState.highWaterMark;
-  }
-});
-
-// exposed for testing purposes only.
-Readable._fromList = fromList;
-
-// Pluck off n bytes from an array of buffers.
-// Length is the combined lengths of all the buffers in the list.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function fromList(n, state) {
-  // nothing buffered
-  if (state.length === 0) return null;
-
-  var ret;
-  if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) {
-    // read it all, truncate the list
-    if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length);
-    state.buffer.clear();
-  } else {
-    // read part of list
-    ret = fromListPartial(n, state.buffer, state.decoder);
-  }
-
-  return ret;
-}
-
-// Extracts only enough buffered data to satisfy the amount requested.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function fromListPartial(n, list, hasStrings) {
-  var ret;
-  if (n < list.head.data.length) {
-    // slice is the same for buffers and strings
-    ret = list.head.data.slice(0, n);
-    list.head.data = list.head.data.slice(n);
-  } else if (n === list.head.data.length) {
-    // first chunk is a perfect match
-    ret = list.shift();
-  } else {
-    // result spans more than one buffer
-    ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list);
-  }
-  return ret;
-}
-
-// Copies a specified amount of characters from the list of buffered data
-// chunks.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function copyFromBufferString(n, list) {
-  var p = list.head;
-  var c = 1;
-  var ret = p.data;
-  n -= ret.length;
-  while (p = p.next) {
-    var str = p.data;
-    var nb = n > str.length ? str.length : n;
-    if (nb === str.length) ret += str;else ret += str.slice(0, n);
-    n -= nb;
-    if (n === 0) {
-      if (nb === str.length) {
-        ++c;
-        if (p.next) list.head = p.next;else list.head = list.tail = null;
-      } else {
-        list.head = p;
-        p.data = str.slice(nb);
-      }
-      break;
-    }
-    ++c;
-  }
-  list.length -= c;
-  return ret;
-}
-
-// Copies a specified amount of bytes from the list of buffered data chunks.
-// This function is designed to be inlinable, so please take care when making
-// changes to the function body.
-function copyFromBuffer(n, list) {
-  var ret = Buffer.allocUnsafe(n);
-  var p = list.head;
-  var c = 1;
-  p.data.copy(ret);
-  n -= p.data.length;
-  while (p = p.next) {
-    var buf = p.data;
-    var nb = n > buf.length ? buf.length : n;
-    buf.copy(ret, ret.length - n, 0, nb);
-    n -= nb;
-    if (n === 0) {
-      if (nb === buf.length) {
-        ++c;
-        if (p.next) list.head = p.next;else list.head = list.tail = null;
-      } else {
-        list.head = p;
-        p.data = buf.slice(nb);
-      }
-      break;
-    }
-    ++c;
-  }
-  list.length -= c;
-  return ret;
-}
-
-function endReadable(stream) {
-  var state = stream._readableState;
-
-  // If we get here before consuming all the bytes, then that is a
-  // bug in node.  Should never happen.
-  if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream');
-
-  if (!state.endEmitted) {
-    state.ended = true;
-    pna.nextTick(endReadableNT, state, stream);
-  }
-}
-
-function endReadableNT(state, stream) {
-  // Check that we didn't get one last unshift.
-  if (!state.endEmitted && state.length === 0) {
-    state.endEmitted = true;
-    stream.readable = false;
-    stream.emit('end');
-  }
-}
-
-function indexOf(xs, x) {
-  for (var i = 0, l = xs.length; i < l; i++) {
-    if (xs[i] === x) return i;
-  }
-  return -1;
-}
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js
deleted file mode 100644
index fcfc105af..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_transform.js
+++ /dev/null
@@ -1,214 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// a transform stream is a readable/writable stream where you do
-// something with the data.  Sometimes it's called a "filter",
-// but that's not a great name for it, since that implies a thing where
-// some bits pass through, and others are simply ignored.  (That would
-// be a valid example of a transform, of course.)
-//
-// While the output is causally related to the input, it's not a
-// necessarily symmetric or synchronous transformation.  For example,
-// a zlib stream might take multiple plain-text writes(), and then
-// emit a single compressed chunk some time in the future.
-//
-// Here's how this works:
-//
-// The Transform stream has all the aspects of the readable and writable
-// stream classes.  When you write(chunk), that calls _write(chunk,cb)
-// internally, and returns false if there's a lot of pending writes
-// buffered up.  When you call read(), that calls _read(n) until
-// there's enough pending readable data buffered up.
-//
-// In a transform stream, the written data is placed in a buffer.  When
-// _read(n) is called, it transforms the queued up data, calling the
-// buffered _write cb's as it consumes chunks.  If consuming a single
-// written chunk would result in multiple output chunks, then the first
-// outputted bit calls the readcb, and subsequent chunks just go into
-// the read buffer, and will cause it to emit 'readable' if necessary.
-//
-// This way, back-pressure is actually determined by the reading side,
-// since _read has to be called to start processing a new chunk.  However,
-// a pathological inflate type of transform can cause excessive buffering
-// here.  For example, imagine a stream where every byte of input is
-// interpreted as an integer from 0-255, and then results in that many
-// bytes of output.  Writing the 4 bytes {ff,ff,ff,ff} would result in
-// 1kb of data being output.  In this case, you could write a very small
-// amount of input, and end up with a very large amount of output.  In
-// such a pathological inflating mechanism, there'd be no way to tell
-// the system to stop doing the transform.  A single 4MB write could
-// cause the system to run out of memory.
-//
-// However, even in such a pathological case, only a single written chunk
-// would be consumed, and then the rest would wait (un-transformed) until
-// the results of the previous transformed chunk were consumed.
-
-'use strict';
-
-module.exports = Transform;
-
-var Duplex = require('./_stream_duplex');
-
-/**/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/**/
-
-util.inherits(Transform, Duplex);
-
-function afterTransform(er, data) {
-  var ts = this._transformState;
-  ts.transforming = false;
-
-  var cb = ts.writecb;
-
-  if (!cb) {
-    return this.emit('error', new Error('write callback called multiple times'));
-  }
-
-  ts.writechunk = null;
-  ts.writecb = null;
-
-  if (data != null) // single equals check for both `null` and `undefined`
-    this.push(data);
-
-  cb(er);
-
-  var rs = this._readableState;
-  rs.reading = false;
-  if (rs.needReadable || rs.length < rs.highWaterMark) {
-    this._read(rs.highWaterMark);
-  }
-}
-
-function Transform(options) {
-  if (!(this instanceof Transform)) return new Transform(options);
-
-  Duplex.call(this, options);
-
-  this._transformState = {
-    afterTransform: afterTransform.bind(this),
-    needTransform: false,
-    transforming: false,
-    writecb: null,
-    writechunk: null,
-    writeencoding: null
-  };
-
-  // start out asking for a readable event once data is transformed.
-  this._readableState.needReadable = true;
-
-  // we have implemented the _read method, and done the other things
-  // that Readable wants before the first _read call, so unset the
-  // sync guard flag.
-  this._readableState.sync = false;
-
-  if (options) {
-    if (typeof options.transform === 'function') this._transform = options.transform;
-
-    if (typeof options.flush === 'function') this._flush = options.flush;
-  }
-
-  // When the writable side finishes, then flush out anything remaining.
-  this.on('prefinish', prefinish);
-}
-
-function prefinish() {
-  var _this = this;
-
-  if (typeof this._flush === 'function') {
-    this._flush(function (er, data) {
-      done(_this, er, data);
-    });
-  } else {
-    done(this, null, null);
-  }
-}
-
-Transform.prototype.push = function (chunk, encoding) {
-  this._transformState.needTransform = false;
-  return Duplex.prototype.push.call(this, chunk, encoding);
-};
-
-// This is the part where you do stuff!
-// override this function in implementation classes.
-// 'chunk' is an input chunk.
-//
-// Call `push(newChunk)` to pass along transformed output
-// to the readable side.  You may call 'push' zero or more times.
-//
-// Call `cb(err)` when you are done with this chunk.  If you pass
-// an error, then that'll put the hurt on the whole operation.  If you
-// never call cb(), then you'll never get another chunk.
-Transform.prototype._transform = function (chunk, encoding, cb) {
-  throw new Error('_transform() is not implemented');
-};
-
-Transform.prototype._write = function (chunk, encoding, cb) {
-  var ts = this._transformState;
-  ts.writecb = cb;
-  ts.writechunk = chunk;
-  ts.writeencoding = encoding;
-  if (!ts.transforming) {
-    var rs = this._readableState;
-    if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark);
-  }
-};
-
-// Doesn't matter what the args are here.
-// _transform does all the work.
-// That we got here means that the readable side wants more data.
-Transform.prototype._read = function (n) {
-  var ts = this._transformState;
-
-  if (ts.writechunk !== null && ts.writecb && !ts.transforming) {
-    ts.transforming = true;
-    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
-  } else {
-    // mark that we need a transform, so that any data that comes in
-    // will get processed, now that we've asked for it.
-    ts.needTransform = true;
-  }
-};
-
-Transform.prototype._destroy = function (err, cb) {
-  var _this2 = this;
-
-  Duplex.prototype._destroy.call(this, err, function (err2) {
-    cb(err2);
-    _this2.emit('close');
-  });
-};
-
-function done(stream, er, data) {
-  if (er) return stream.emit('error', er);
-
-  if (data != null) // single equals check for both `null` and `undefined`
-    stream.push(data);
-
-  // if there's nothing in the write buffer, then that means
-  // that nothing more will ever be provided
-  if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0');
-
-  if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming');
-
-  return stream.push(null);
-}
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js
deleted file mode 100644
index e1e897ff3..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/_stream_writable.js
+++ /dev/null
@@ -1,685 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-// A bit simpler than readable streams.
-// Implement an async ._write(chunk, encoding, cb), and it'll handle all
-// the drain event emission and buffering.
-
-'use strict';
-
-/**/
-
-var pna = require('process-nextick-args');
-/**/
-
-module.exports = Writable;
-
-/*  */
-function WriteReq(chunk, encoding, cb) {
-  this.chunk = chunk;
-  this.encoding = encoding;
-  this.callback = cb;
-  this.next = null;
-}
-
-// It seems a linked list but it is not
-// there will be only 2 of these for each stream
-function CorkedRequest(state) {
-  var _this = this;
-
-  this.next = null;
-  this.entry = null;
-  this.finish = function () {
-    onCorkedFinish(_this, state);
-  };
-}
-/*  */
-
-/**/
-var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick;
-/**/
-
-/**/
-var Duplex;
-/**/
-
-Writable.WritableState = WritableState;
-
-/**/
-var util = Object.create(require('core-util-is'));
-util.inherits = require('inherits');
-/**/
-
-/**/
-var internalUtil = {
-  deprecate: require('util-deprecate')
-};
-/**/
-
-/**/
-var Stream = require('./internal/streams/stream');
-/**/
-
-/**/
-
-var Buffer = require('safe-buffer').Buffer;
-var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {};
-function _uint8ArrayToBuffer(chunk) {
-  return Buffer.from(chunk);
-}
-function _isUint8Array(obj) {
-  return Buffer.isBuffer(obj) || obj instanceof OurUint8Array;
-}
-
-/**/
-
-var destroyImpl = require('./internal/streams/destroy');
-
-util.inherits(Writable, Stream);
-
-function nop() {}
-
-function WritableState(options, stream) {
-  Duplex = Duplex || require('./_stream_duplex');
-
-  options = options || {};
-
-  // Duplex streams are both readable and writable, but share
-  // the same options object.
-  // However, some cases require setting options to different
-  // values for the readable and the writable sides of the duplex stream.
-  // These options can be provided separately as readableXXX and writableXXX.
-  var isDuplex = stream instanceof Duplex;
-
-  // object stream flag to indicate whether or not this stream
-  // contains buffers or objects.
-  this.objectMode = !!options.objectMode;
-
-  if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode;
-
-  // the point at which write() starts returning false
-  // Note: 0 is a valid value, means that we always return false if
-  // the entire buffer is not flushed immediately on write()
-  var hwm = options.highWaterMark;
-  var writableHwm = options.writableHighWaterMark;
-  var defaultHwm = this.objectMode ? 16 : 16 * 1024;
-
-  if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm;
-
-  // cast to ints.
-  this.highWaterMark = Math.floor(this.highWaterMark);
-
-  // if _final has been called
-  this.finalCalled = false;
-
-  // drain event flag.
-  this.needDrain = false;
-  // at the start of calling end()
-  this.ending = false;
-  // when end() has been called, and returned
-  this.ended = false;
-  // when 'finish' is emitted
-  this.finished = false;
-
-  // has it been destroyed
-  this.destroyed = false;
-
-  // should we decode strings into buffers before passing to _write?
-  // this is here so that some node-core streams can optimize string
-  // handling at a lower level.
-  var noDecode = options.decodeStrings === false;
-  this.decodeStrings = !noDecode;
-
-  // Crypto is kind of old and crusty.  Historically, its default string
-  // encoding is 'binary' so we have to make this configurable.
-  // Everything else in the universe uses 'utf8', though.
-  this.defaultEncoding = options.defaultEncoding || 'utf8';
-
-  // not an actual buffer we keep track of, but a measurement
-  // of how much we're waiting to get pushed to some underlying
-  // socket or file.
-  this.length = 0;
-
-  // a flag to see when we're in the middle of a write.
-  this.writing = false;
-
-  // when true all writes will be buffered until .uncork() call
-  this.corked = 0;
-
-  // a flag to be able to tell if the onwrite cb is called immediately,
-  // or on a later tick.  We set this to true at first, because any
-  // actions that shouldn't happen until "later" should generally also
-  // not happen before the first write call.
-  this.sync = true;
-
-  // a flag to know if we're processing previously buffered items, which
-  // may call the _write() callback in the same tick, so that we don't
-  // end up in an overlapped onwrite situation.
-  this.bufferProcessing = false;
-
-  // the callback that's passed to _write(chunk,cb)
-  this.onwrite = function (er) {
-    onwrite(stream, er);
-  };
-
-  // the callback that the user supplies to write(chunk,encoding,cb)
-  this.writecb = null;
-
-  // the amount that is being written when _write is called.
-  this.writelen = 0;
-
-  this.bufferedRequest = null;
-  this.lastBufferedRequest = null;
-
-  // number of pending user-supplied write callbacks
-  // this must be 0 before 'finish' can be emitted
-  this.pendingcb = 0;
-
-  // emit prefinish if the only thing we're waiting for is _write cbs
-  // This is relevant for synchronous Transform streams
-  this.prefinished = false;
-
-  // True if the error was already emitted and should not be thrown again
-  this.errorEmitted = false;
-
-  // count buffered requests
-  this.bufferedRequestCount = 0;
-
-  // allocate the first CorkedRequest, there is always
-  // one allocated and free to use, and we maintain at most two
-  this.corkedRequestsFree = new CorkedRequest(this);
-}
-
-WritableState.prototype.getBuffer = function getBuffer() {
-  var current = this.bufferedRequest;
-  var out = [];
-  while (current) {
-    out.push(current);
-    current = current.next;
-  }
-  return out;
-};
-
-(function () {
-  try {
-    Object.defineProperty(WritableState.prototype, 'buffer', {
-      get: internalUtil.deprecate(function () {
-        return this.getBuffer();
-      }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003')
-    });
-  } catch (_) {}
-})();
-
-// Test _writableState for inheritance to account for Duplex streams,
-// whose prototype chain only points to Readable.
-var realHasInstance;
-if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') {
-  realHasInstance = Function.prototype[Symbol.hasInstance];
-  Object.defineProperty(Writable, Symbol.hasInstance, {
-    value: function (object) {
-      if (realHasInstance.call(this, object)) return true;
-      if (this !== Writable) return false;
-
-      return object && object._writableState instanceof WritableState;
-    }
-  });
-} else {
-  realHasInstance = function (object) {
-    return object instanceof this;
-  };
-}
-
-function Writable(options) {
-  Duplex = Duplex || require('./_stream_duplex');
-
-  // Writable ctor is applied to Duplexes, too.
-  // `realHasInstance` is necessary because using plain `instanceof`
-  // would return false, as no `_writableState` property is attached.
-
-  // Trying to use the custom `instanceof` for Writable here will also break the
-  // Node.js LazyTransform implementation, which has a non-trivial getter for
-  // `_writableState` that would lead to infinite recursion.
-  if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) {
-    return new Writable(options);
-  }
-
-  this._writableState = new WritableState(options, this);
-
-  // legacy.
-  this.writable = true;
-
-  if (options) {
-    if (typeof options.write === 'function') this._write = options.write;
-
-    if (typeof options.writev === 'function') this._writev = options.writev;
-
-    if (typeof options.destroy === 'function') this._destroy = options.destroy;
-
-    if (typeof options.final === 'function') this._final = options.final;
-  }
-
-  Stream.call(this);
-}
-
-// Otherwise people can pipe Writable streams, which is just wrong.
-Writable.prototype.pipe = function () {
-  this.emit('error', new Error('Cannot pipe, not readable'));
-};
-
-function writeAfterEnd(stream, cb) {
-  var er = new Error('write after end');
-  // TODO: defer error events consistently everywhere, not just the cb
-  stream.emit('error', er);
-  pna.nextTick(cb, er);
-}
-
-// Checks that a user-supplied chunk is valid, especially for the particular
-// mode the stream is in. Currently this means that `null` is never accepted
-// and undefined/non-string values are only allowed in object mode.
-function validChunk(stream, state, chunk, cb) {
-  var valid = true;
-  var er = false;
-
-  if (chunk === null) {
-    er = new TypeError('May not write null values to stream');
-  } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) {
-    er = new TypeError('Invalid non-string/buffer chunk');
-  }
-  if (er) {
-    stream.emit('error', er);
-    pna.nextTick(cb, er);
-    valid = false;
-  }
-  return valid;
-}
-
-Writable.prototype.write = function (chunk, encoding, cb) {
-  var state = this._writableState;
-  var ret = false;
-  var isBuf = !state.objectMode && _isUint8Array(chunk);
-
-  if (isBuf && !Buffer.isBuffer(chunk)) {
-    chunk = _uint8ArrayToBuffer(chunk);
-  }
-
-  if (typeof encoding === 'function') {
-    cb = encoding;
-    encoding = null;
-  }
-
-  if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding;
-
-  if (typeof cb !== 'function') cb = nop;
-
-  if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) {
-    state.pendingcb++;
-    ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb);
-  }
-
-  return ret;
-};
-
-Writable.prototype.cork = function () {
-  var state = this._writableState;
-
-  state.corked++;
-};
-
-Writable.prototype.uncork = function () {
-  var state = this._writableState;
-
-  if (state.corked) {
-    state.corked--;
-
-    if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state);
-  }
-};
-
-Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
-  // node::ParseEncoding() requires lower case.
-  if (typeof encoding === 'string') encoding = encoding.toLowerCase();
-  if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding);
-  this._writableState.defaultEncoding = encoding;
-  return this;
-};
-
-function decodeChunk(state, chunk, encoding) {
-  if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') {
-    chunk = Buffer.from(chunk, encoding);
-  }
-  return chunk;
-}
-
-Object.defineProperty(Writable.prototype, 'writableHighWaterMark', {
-  // making it explicit this property is not enumerable
-  // because otherwise some prototype manipulation in
-  // userland will fail
-  enumerable: false,
-  get: function () {
-    return this._writableState.highWaterMark;
-  }
-});
-
-// if we're already writing something, then just put this
-// in the queue, and wait our turn.  Otherwise, call _write
-// If we return false, then we need a drain event, so set that flag.
-function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) {
-  if (!isBuf) {
-    var newChunk = decodeChunk(state, chunk, encoding);
-    if (chunk !== newChunk) {
-      isBuf = true;
-      encoding = 'buffer';
-      chunk = newChunk;
-    }
-  }
-  var len = state.objectMode ? 1 : chunk.length;
-
-  state.length += len;
-
-  var ret = state.length < state.highWaterMark;
-  // we must ensure that previous needDrain will not be reset to false.
-  if (!ret) state.needDrain = true;
-
-  if (state.writing || state.corked) {
-    var last = state.lastBufferedRequest;
-    state.lastBufferedRequest = {
-      chunk: chunk,
-      encoding: encoding,
-      isBuf: isBuf,
-      callback: cb,
-      next: null
-    };
-    if (last) {
-      last.next = state.lastBufferedRequest;
-    } else {
-      state.bufferedRequest = state.lastBufferedRequest;
-    }
-    state.bufferedRequestCount += 1;
-  } else {
-    doWrite(stream, state, false, len, chunk, encoding, cb);
-  }
-
-  return ret;
-}
-
-function doWrite(stream, state, writev, len, chunk, encoding, cb) {
-  state.writelen = len;
-  state.writecb = cb;
-  state.writing = true;
-  state.sync = true;
-  if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite);
-  state.sync = false;
-}
-
-function onwriteError(stream, state, sync, er, cb) {
-  --state.pendingcb;
-
-  if (sync) {
-    // defer the callback if we are being called synchronously
-    // to avoid piling up things on the stack
-    pna.nextTick(cb, er);
-    // this can emit finish, and it will always happen
-    // after error
-    pna.nextTick(finishMaybe, stream, state);
-    stream._writableState.errorEmitted = true;
-    stream.emit('error', er);
-  } else {
-    // the caller expect this to happen before if
-    // it is async
-    cb(er);
-    stream._writableState.errorEmitted = true;
-    stream.emit('error', er);
-    // this can emit finish, but finish must
-    // always follow error
-    finishMaybe(stream, state);
-  }
-}
-
-function onwriteStateUpdate(state) {
-  state.writing = false;
-  state.writecb = null;
-  state.length -= state.writelen;
-  state.writelen = 0;
-}
-
-function onwrite(stream, er) {
-  var state = stream._writableState;
-  var sync = state.sync;
-  var cb = state.writecb;
-
-  onwriteStateUpdate(state);
-
-  if (er) onwriteError(stream, state, sync, er, cb);else {
-    // Check if we're actually ready to finish, but don't emit yet
-    var finished = needFinish(state);
-
-    if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) {
-      clearBuffer(stream, state);
-    }
-
-    if (sync) {
-      /**/
-      asyncWrite(afterWrite, stream, state, finished, cb);
-      /**/
-    } else {
-      afterWrite(stream, state, finished, cb);
-    }
-  }
-}
-
-function afterWrite(stream, state, finished, cb) {
-  if (!finished) onwriteDrain(stream, state);
-  state.pendingcb--;
-  cb();
-  finishMaybe(stream, state);
-}
-
-// Must force callback to be called on nextTick, so that we don't
-// emit 'drain' before the write() consumer gets the 'false' return
-// value, and has a chance to attach a 'drain' listener.
-function onwriteDrain(stream, state) {
-  if (state.length === 0 && state.needDrain) {
-    state.needDrain = false;
-    stream.emit('drain');
-  }
-}
-
-// if there's something in the buffer waiting, then process it
-function clearBuffer(stream, state) {
-  state.bufferProcessing = true;
-  var entry = state.bufferedRequest;
-
-  if (stream._writev && entry && entry.next) {
-    // Fast case, write everything using _writev()
-    var l = state.bufferedRequestCount;
-    var buffer = new Array(l);
-    var holder = state.corkedRequestsFree;
-    holder.entry = entry;
-
-    var count = 0;
-    var allBuffers = true;
-    while (entry) {
-      buffer[count] = entry;
-      if (!entry.isBuf) allBuffers = false;
-      entry = entry.next;
-      count += 1;
-    }
-    buffer.allBuffers = allBuffers;
-
-    doWrite(stream, state, true, state.length, buffer, '', holder.finish);
-
-    // doWrite is almost always async, defer these to save a bit of time
-    // as the hot path ends with doWrite
-    state.pendingcb++;
-    state.lastBufferedRequest = null;
-    if (holder.next) {
-      state.corkedRequestsFree = holder.next;
-      holder.next = null;
-    } else {
-      state.corkedRequestsFree = new CorkedRequest(state);
-    }
-    state.bufferedRequestCount = 0;
-  } else {
-    // Slow case, write chunks one-by-one
-    while (entry) {
-      var chunk = entry.chunk;
-      var encoding = entry.encoding;
-      var cb = entry.callback;
-      var len = state.objectMode ? 1 : chunk.length;
-
-      doWrite(stream, state, false, len, chunk, encoding, cb);
-      entry = entry.next;
-      state.bufferedRequestCount--;
-      // if we didn't call the onwrite immediately, then
-      // it means that we need to wait until it does.
-      // also, that means that the chunk and cb are currently
-      // being processed, so move the buffer counter past them.
-      if (state.writing) {
-        break;
-      }
-    }
-
-    if (entry === null) state.lastBufferedRequest = null;
-  }
-
-  state.bufferedRequest = entry;
-  state.bufferProcessing = false;
-}
-
-Writable.prototype._write = function (chunk, encoding, cb) {
-  cb(new Error('_write() is not implemented'));
-};
-
-Writable.prototype._writev = null;
-
-Writable.prototype.end = function (chunk, encoding, cb) {
-  var state = this._writableState;
-
-  if (typeof chunk === 'function') {
-    cb = chunk;
-    chunk = null;
-    encoding = null;
-  } else if (typeof encoding === 'function') {
-    cb = encoding;
-    encoding = null;
-  }
-
-  if (chunk !== null && chunk !== undefined) this.write(chunk, encoding);
-
-  // .end() fully uncorks
-  if (state.corked) {
-    state.corked = 1;
-    this.uncork();
-  }
-
-  // ignore unnecessary end() calls.
-  if (!state.ending) endWritable(this, state, cb);
-};
-
-function needFinish(state) {
-  return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing;
-}
-function callFinal(stream, state) {
-  stream._final(function (err) {
-    state.pendingcb--;
-    if (err) {
-      stream.emit('error', err);
-    }
-    state.prefinished = true;
-    stream.emit('prefinish');
-    finishMaybe(stream, state);
-  });
-}
-function prefinish(stream, state) {
-  if (!state.prefinished && !state.finalCalled) {
-    if (typeof stream._final === 'function') {
-      state.pendingcb++;
-      state.finalCalled = true;
-      pna.nextTick(callFinal, stream, state);
-    } else {
-      state.prefinished = true;
-      stream.emit('prefinish');
-    }
-  }
-}
-
-function finishMaybe(stream, state) {
-  var need = needFinish(state);
-  if (need) {
-    prefinish(stream, state);
-    if (state.pendingcb === 0) {
-      state.finished = true;
-      stream.emit('finish');
-    }
-  }
-  return need;
-}
-
-function endWritable(stream, state, cb) {
-  state.ending = true;
-  finishMaybe(stream, state);
-  if (cb) {
-    if (state.finished) pna.nextTick(cb);else stream.once('finish', cb);
-  }
-  state.ended = true;
-  stream.writable = false;
-}
-
-function onCorkedFinish(corkReq, state, err) {
-  var entry = corkReq.entry;
-  corkReq.entry = null;
-  while (entry) {
-    var cb = entry.callback;
-    state.pendingcb--;
-    cb(err);
-    entry = entry.next;
-  }
-
-  // reuse the free corkReq.
-  state.corkedRequestsFree.next = corkReq;
-}
-
-Object.defineProperty(Writable.prototype, 'destroyed', {
-  get: function () {
-    if (this._writableState === undefined) {
-      return false;
-    }
-    return this._writableState.destroyed;
-  },
-  set: function (value) {
-    // we ignore the value if the stream
-    // has not been initialized yet
-    if (!this._writableState) {
-      return;
-    }
-
-    // backward compatibility, the user is explicitly
-    // managing destroyed
-    this._writableState.destroyed = value;
-  }
-});
-
-Writable.prototype.destroy = destroyImpl.destroy;
-Writable.prototype._undestroy = destroyImpl.undestroy;
-Writable.prototype._destroy = function (err, cb) {
-  this.end();
-  cb(err);
-};
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/BufferList.js
deleted file mode 100644
index 5e080976c..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/BufferList.js
+++ /dev/null
@@ -1,78 +0,0 @@
-'use strict';
-
-function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-
-var Buffer = require('safe-buffer').Buffer;
-var util = require('util');
-
-function copyBuffer(src, target, offset) {
-  src.copy(target, offset);
-}
-
-module.exports = function () {
-  function BufferList() {
-    _classCallCheck(this, BufferList);
-
-    this.head = null;
-    this.tail = null;
-    this.length = 0;
-  }
-
-  BufferList.prototype.push = function push(v) {
-    var entry = { data: v, next: null };
-    if (this.length > 0) this.tail.next = entry;else this.head = entry;
-    this.tail = entry;
-    ++this.length;
-  };
-
-  BufferList.prototype.unshift = function unshift(v) {
-    var entry = { data: v, next: this.head };
-    if (this.length === 0) this.tail = entry;
-    this.head = entry;
-    ++this.length;
-  };
-
-  BufferList.prototype.shift = function shift() {
-    if (this.length === 0) return;
-    var ret = this.head.data;
-    if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next;
-    --this.length;
-    return ret;
-  };
-
-  BufferList.prototype.clear = function clear() {
-    this.head = this.tail = null;
-    this.length = 0;
-  };
-
-  BufferList.prototype.join = function join(s) {
-    if (this.length === 0) return '';
-    var p = this.head;
-    var ret = '' + p.data;
-    while (p = p.next) {
-      ret += s + p.data;
-    }return ret;
-  };
-
-  BufferList.prototype.concat = function concat(n) {
-    if (this.length === 0) return Buffer.alloc(0);
-    var ret = Buffer.allocUnsafe(n >>> 0);
-    var p = this.head;
-    var i = 0;
-    while (p) {
-      copyBuffer(p.data, ret, i);
-      i += p.data.length;
-      p = p.next;
-    }
-    return ret;
-  };
-
-  return BufferList;
-}();
-
-if (util && util.inspect && util.inspect.custom) {
-  module.exports.prototype[util.inspect.custom] = function () {
-    var obj = util.inspect({ length: this.length });
-    return this.constructor.name + ' ' + obj;
-  };
-}
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js
deleted file mode 100644
index 85a821407..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/destroy.js
+++ /dev/null
@@ -1,84 +0,0 @@
-'use strict';
-
-/**/
-
-var pna = require('process-nextick-args');
-/**/
-
-// undocumented cb() API, needed for core, not for public API
-function destroy(err, cb) {
-  var _this = this;
-
-  var readableDestroyed = this._readableState && this._readableState.destroyed;
-  var writableDestroyed = this._writableState && this._writableState.destroyed;
-
-  if (readableDestroyed || writableDestroyed) {
-    if (cb) {
-      cb(err);
-    } else if (err) {
-      if (!this._writableState) {
-        pna.nextTick(emitErrorNT, this, err);
-      } else if (!this._writableState.errorEmitted) {
-        this._writableState.errorEmitted = true;
-        pna.nextTick(emitErrorNT, this, err);
-      }
-    }
-
-    return this;
-  }
-
-  // we set destroyed to true before firing error callbacks in order
-  // to make it re-entrance safe in case destroy() is called within callbacks
-
-  if (this._readableState) {
-    this._readableState.destroyed = true;
-  }
-
-  // if this is a duplex stream mark the writable part as destroyed as well
-  if (this._writableState) {
-    this._writableState.destroyed = true;
-  }
-
-  this._destroy(err || null, function (err) {
-    if (!cb && err) {
-      if (!_this._writableState) {
-        pna.nextTick(emitErrorNT, _this, err);
-      } else if (!_this._writableState.errorEmitted) {
-        _this._writableState.errorEmitted = true;
-        pna.nextTick(emitErrorNT, _this, err);
-      }
-    } else if (cb) {
-      cb(err);
-    }
-  });
-
-  return this;
-}
-
-function undestroy() {
-  if (this._readableState) {
-    this._readableState.destroyed = false;
-    this._readableState.reading = false;
-    this._readableState.ended = false;
-    this._readableState.endEmitted = false;
-  }
-
-  if (this._writableState) {
-    this._writableState.destroyed = false;
-    this._writableState.ended = false;
-    this._writableState.ending = false;
-    this._writableState.finalCalled = false;
-    this._writableState.prefinished = false;
-    this._writableState.finished = false;
-    this._writableState.errorEmitted = false;
-  }
-}
-
-function emitErrorNT(self, err) {
-  self.emit('error', err);
-}
-
-module.exports = {
-  destroy: destroy,
-  undestroy: undestroy
-};
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js
deleted file mode 100644
index 9332a3fda..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream-browser.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('events').EventEmitter;
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js
deleted file mode 100644
index ce2ad5b6e..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/lib/internal/streams/stream.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('stream');
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/package.json b/node_modules/archiver-utils/node_modules/readable-stream/package.json
deleted file mode 100644
index 514c178e9..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/package.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
-  "name": "readable-stream",
-  "version": "2.3.8",
-  "description": "Streams3, a user-land copy of the stream library from Node.js",
-  "main": "readable.js",
-  "dependencies": {
-    "core-util-is": "~1.0.0",
-    "inherits": "~2.0.3",
-    "isarray": "~1.0.0",
-    "process-nextick-args": "~2.0.0",
-    "safe-buffer": "~5.1.1",
-    "string_decoder": "~1.1.1",
-    "util-deprecate": "~1.0.1"
-  },
-  "devDependencies": {
-    "assert": "^1.4.0",
-    "babel-polyfill": "^6.9.1",
-    "buffer": "^4.9.0",
-    "lolex": "^2.3.2",
-    "nyc": "^6.4.0",
-    "tap": "^0.7.0",
-    "tape": "^4.8.0"
-  },
-  "scripts": {
-    "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js",
-    "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js",
-    "cover": "nyc npm test",
-    "report": "nyc report --reporter=lcov"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/nodejs/readable-stream"
-  },
-  "keywords": [
-    "readable",
-    "stream",
-    "pipe"
-  ],
-  "browser": {
-    "util": false,
-    "./readable.js": "./readable-browser.js",
-    "./writable.js": "./writable-browser.js",
-    "./duplex.js": "./duplex-browser.js",
-    "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js"
-  },
-  "nyc": {
-    "include": [
-      "lib/**.js"
-    ]
-  },
-  "license": "MIT"
-}
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/passthrough.js b/node_modules/archiver-utils/node_modules/readable-stream/passthrough.js
deleted file mode 100644
index ffd791d7f..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/passthrough.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./readable').PassThrough
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js
deleted file mode 100644
index e50372592..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/readable-browser.js
+++ /dev/null
@@ -1,7 +0,0 @@
-exports = module.exports = require('./lib/_stream_readable.js');
-exports.Stream = exports;
-exports.Readable = exports;
-exports.Writable = require('./lib/_stream_writable.js');
-exports.Duplex = require('./lib/_stream_duplex.js');
-exports.Transform = require('./lib/_stream_transform.js');
-exports.PassThrough = require('./lib/_stream_passthrough.js');
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/readable.js b/node_modules/archiver-utils/node_modules/readable-stream/readable.js
deleted file mode 100644
index ec89ec533..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/readable.js
+++ /dev/null
@@ -1,19 +0,0 @@
-var Stream = require('stream');
-if (process.env.READABLE_STREAM === 'disable' && Stream) {
-  module.exports = Stream;
-  exports = module.exports = Stream.Readable;
-  exports.Readable = Stream.Readable;
-  exports.Writable = Stream.Writable;
-  exports.Duplex = Stream.Duplex;
-  exports.Transform = Stream.Transform;
-  exports.PassThrough = Stream.PassThrough;
-  exports.Stream = Stream;
-} else {
-  exports = module.exports = require('./lib/_stream_readable.js');
-  exports.Stream = Stream || exports;
-  exports.Readable = exports;
-  exports.Writable = require('./lib/_stream_writable.js');
-  exports.Duplex = require('./lib/_stream_duplex.js');
-  exports.Transform = require('./lib/_stream_transform.js');
-  exports.PassThrough = require('./lib/_stream_passthrough.js');
-}
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/transform.js b/node_modules/archiver-utils/node_modules/readable-stream/transform.js
deleted file mode 100644
index b1baba26d..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/transform.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./readable').Transform
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/writable-browser.js b/node_modules/archiver-utils/node_modules/readable-stream/writable-browser.js
deleted file mode 100644
index ebdde6a85..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/writable-browser.js
+++ /dev/null
@@ -1 +0,0 @@
-module.exports = require('./lib/_stream_writable.js');
diff --git a/node_modules/archiver-utils/node_modules/readable-stream/writable.js b/node_modules/archiver-utils/node_modules/readable-stream/writable.js
deleted file mode 100644
index 3211a6f80..000000000
--- a/node_modules/archiver-utils/node_modules/readable-stream/writable.js
+++ /dev/null
@@ -1,8 +0,0 @@
-var Stream = require("stream")
-var Writable = require("./lib/_stream_writable.js")
-
-if (process.env.READABLE_STREAM === 'disable') {
-  module.exports = Stream && Stream.Writable || Writable
-} else {
-  module.exports = Writable
-}
diff --git a/node_modules/archiver-utils/node_modules/string_decoder/lib/string_decoder.js b/node_modules/archiver-utils/node_modules/string_decoder/lib/string_decoder.js
deleted file mode 100644
index 2e89e63f7..000000000
--- a/node_modules/archiver-utils/node_modules/string_decoder/lib/string_decoder.js
+++ /dev/null
@@ -1,296 +0,0 @@
-// Copyright Joyent, Inc. and other Node contributors.
-//
-// Permission is hereby granted, free of charge, to any person obtaining a
-// copy of this software and associated documentation files (the
-// "Software"), to deal in the Software without restriction, including
-// without limitation the rights to use, copy, modify, merge, publish,
-// distribute, sublicense, and/or sell copies of the Software, and to permit
-// persons to whom the Software is furnished to do so, subject to the
-// following conditions:
-//
-// The above copyright notice and this permission notice shall be included
-// in all copies or substantial portions of the Software.
-//
-// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
-// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
-// USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-'use strict';
-
-/**/
-
-var Buffer = require('safe-buffer').Buffer;
-/**/
-
-var isEncoding = Buffer.isEncoding || function (encoding) {
-  encoding = '' + encoding;
-  switch (encoding && encoding.toLowerCase()) {
-    case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw':
-      return true;
-    default:
-      return false;
-  }
-};
-
-function _normalizeEncoding(enc) {
-  if (!enc) return 'utf8';
-  var retried;
-  while (true) {
-    switch (enc) {
-      case 'utf8':
-      case 'utf-8':
-        return 'utf8';
-      case 'ucs2':
-      case 'ucs-2':
-      case 'utf16le':
-      case 'utf-16le':
-        return 'utf16le';
-      case 'latin1':
-      case 'binary':
-        return 'latin1';
-      case 'base64':
-      case 'ascii':
-      case 'hex':
-        return enc;
-      default:
-        if (retried) return; // undefined
-        enc = ('' + enc).toLowerCase();
-        retried = true;
-    }
-  }
-};
-
-// Do not cache `Buffer.isEncoding` when checking encoding names as some
-// modules monkey-patch it to support additional encodings
-function normalizeEncoding(enc) {
-  var nenc = _normalizeEncoding(enc);
-  if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc);
-  return nenc || enc;
-}
-
-// StringDecoder provides an interface for efficiently splitting a series of
-// buffers into a series of JS strings without breaking apart multi-byte
-// characters.
-exports.StringDecoder = StringDecoder;
-function StringDecoder(encoding) {
-  this.encoding = normalizeEncoding(encoding);
-  var nb;
-  switch (this.encoding) {
-    case 'utf16le':
-      this.text = utf16Text;
-      this.end = utf16End;
-      nb = 4;
-      break;
-    case 'utf8':
-      this.fillLast = utf8FillLast;
-      nb = 4;
-      break;
-    case 'base64':
-      this.text = base64Text;
-      this.end = base64End;
-      nb = 3;
-      break;
-    default:
-      this.write = simpleWrite;
-      this.end = simpleEnd;
-      return;
-  }
-  this.lastNeed = 0;
-  this.lastTotal = 0;
-  this.lastChar = Buffer.allocUnsafe(nb);
-}
-
-StringDecoder.prototype.write = function (buf) {
-  if (buf.length === 0) return '';
-  var r;
-  var i;
-  if (this.lastNeed) {
-    r = this.fillLast(buf);
-    if (r === undefined) return '';
-    i = this.lastNeed;
-    this.lastNeed = 0;
-  } else {
-    i = 0;
-  }
-  if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i);
-  return r || '';
-};
-
-StringDecoder.prototype.end = utf8End;
-
-// Returns only complete characters in a Buffer
-StringDecoder.prototype.text = utf8Text;
-
-// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer
-StringDecoder.prototype.fillLast = function (buf) {
-  if (this.lastNeed <= buf.length) {
-    buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed);
-    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
-  }
-  buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length);
-  this.lastNeed -= buf.length;
-};
-
-// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a
-// continuation byte. If an invalid byte is detected, -2 is returned.
-function utf8CheckByte(byte) {
-  if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4;
-  return byte >> 6 === 0x02 ? -1 : -2;
-}
-
-// Checks at most 3 bytes at the end of a Buffer in order to detect an
-// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4)
-// needed to complete the UTF-8 character (if applicable) are returned.
-function utf8CheckIncomplete(self, buf, i) {
-  var j = buf.length - 1;
-  if (j < i) return 0;
-  var nb = utf8CheckByte(buf[j]);
-  if (nb >= 0) {
-    if (nb > 0) self.lastNeed = nb - 1;
-    return nb;
-  }
-  if (--j < i || nb === -2) return 0;
-  nb = utf8CheckByte(buf[j]);
-  if (nb >= 0) {
-    if (nb > 0) self.lastNeed = nb - 2;
-    return nb;
-  }
-  if (--j < i || nb === -2) return 0;
-  nb = utf8CheckByte(buf[j]);
-  if (nb >= 0) {
-    if (nb > 0) {
-      if (nb === 2) nb = 0;else self.lastNeed = nb - 3;
-    }
-    return nb;
-  }
-  return 0;
-}
-
-// Validates as many continuation bytes for a multi-byte UTF-8 character as
-// needed or are available. If we see a non-continuation byte where we expect
-// one, we "replace" the validated continuation bytes we've seen so far with
-// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding
-// behavior. The continuation byte check is included three times in the case
-// where all of the continuation bytes for a character exist in the same buffer.
-// It is also done this way as a slight performance increase instead of using a
-// loop.
-function utf8CheckExtraBytes(self, buf, p) {
-  if ((buf[0] & 0xC0) !== 0x80) {
-    self.lastNeed = 0;
-    return '\ufffd';
-  }
-  if (self.lastNeed > 1 && buf.length > 1) {
-    if ((buf[1] & 0xC0) !== 0x80) {
-      self.lastNeed = 1;
-      return '\ufffd';
-    }
-    if (self.lastNeed > 2 && buf.length > 2) {
-      if ((buf[2] & 0xC0) !== 0x80) {
-        self.lastNeed = 2;
-        return '\ufffd';
-      }
-    }
-  }
-}
-
-// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer.
-function utf8FillLast(buf) {
-  var p = this.lastTotal - this.lastNeed;
-  var r = utf8CheckExtraBytes(this, buf, p);
-  if (r !== undefined) return r;
-  if (this.lastNeed <= buf.length) {
-    buf.copy(this.lastChar, p, 0, this.lastNeed);
-    return this.lastChar.toString(this.encoding, 0, this.lastTotal);
-  }
-  buf.copy(this.lastChar, p, 0, buf.length);
-  this.lastNeed -= buf.length;
-}
-
-// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a
-// partial character, the character's bytes are buffered until the required
-// number of bytes are available.
-function utf8Text(buf, i) {
-  var total = utf8CheckIncomplete(this, buf, i);
-  if (!this.lastNeed) return buf.toString('utf8', i);
-  this.lastTotal = total;
-  var end = buf.length - (total - this.lastNeed);
-  buf.copy(this.lastChar, 0, end);
-  return buf.toString('utf8', i, end);
-}
-
-// For UTF-8, a replacement character is added when ending on a partial
-// character.
-function utf8End(buf) {
-  var r = buf && buf.length ? this.write(buf) : '';
-  if (this.lastNeed) return r + '\ufffd';
-  return r;
-}
-
-// UTF-16LE typically needs two bytes per character, but even if we have an even
-// number of bytes available, we need to check if we end on a leading/high
-// surrogate. In that case, we need to wait for the next two bytes in order to
-// decode the last character properly.
-function utf16Text(buf, i) {
-  if ((buf.length - i) % 2 === 0) {
-    var r = buf.toString('utf16le', i);
-    if (r) {
-      var c = r.charCodeAt(r.length - 1);
-      if (c >= 0xD800 && c <= 0xDBFF) {
-        this.lastNeed = 2;
-        this.lastTotal = 4;
-        this.lastChar[0] = buf[buf.length - 2];
-        this.lastChar[1] = buf[buf.length - 1];
-        return r.slice(0, -1);
-      }
-    }
-    return r;
-  }
-  this.lastNeed = 1;
-  this.lastTotal = 2;
-  this.lastChar[0] = buf[buf.length - 1];
-  return buf.toString('utf16le', i, buf.length - 1);
-}
-
-// For UTF-16LE we do not explicitly append special replacement characters if we
-// end on a partial character, we simply let v8 handle that.
-function utf16End(buf) {
-  var r = buf && buf.length ? this.write(buf) : '';
-  if (this.lastNeed) {
-    var end = this.lastTotal - this.lastNeed;
-    return r + this.lastChar.toString('utf16le', 0, end);
-  }
-  return r;
-}
-
-function base64Text(buf, i) {
-  var n = (buf.length - i) % 3;
-  if (n === 0) return buf.toString('base64', i);
-  this.lastNeed = 3 - n;
-  this.lastTotal = 3;
-  if (n === 1) {
-    this.lastChar[0] = buf[buf.length - 1];
-  } else {
-    this.lastChar[0] = buf[buf.length - 2];
-    this.lastChar[1] = buf[buf.length - 1];
-  }
-  return buf.toString('base64', i, buf.length - n);
-}
-
-function base64End(buf) {
-  var r = buf && buf.length ? this.write(buf) : '';
-  if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed);
-  return r;
-}
-
-// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex)
-function simpleWrite(buf) {
-  return buf.toString(this.encoding);
-}
-
-function simpleEnd(buf) {
-  return buf && buf.length ? this.write(buf) : '';
-}
\ No newline at end of file
diff --git a/node_modules/archiver-utils/node_modules/string_decoder/package.json b/node_modules/archiver-utils/node_modules/string_decoder/package.json
deleted file mode 100644
index 518c3eb9f..000000000
--- a/node_modules/archiver-utils/node_modules/string_decoder/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "string_decoder",
-  "version": "1.1.1",
-  "description": "The string_decoder module from Node core",
-  "main": "lib/string_decoder.js",
-  "dependencies": {
-    "safe-buffer": "~5.1.0"
-  },
-  "devDependencies": {
-    "babel-polyfill": "^6.23.0",
-    "core-util-is": "^1.0.2",
-    "inherits": "^2.0.3",
-    "tap": "~0.4.8"
-  },
-  "scripts": {
-    "test": "tap test/parallel/*.js && node test/verify-dependencies",
-    "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/nodejs/string_decoder.git"
-  },
-  "homepage": "https://github.com/nodejs/string_decoder",
-  "keywords": [
-    "string",
-    "decoder",
-    "browser",
-    "browserify"
-  ],
-  "license": "MIT"
-}
diff --git a/node_modules/archiver-utils/package.json b/node_modules/archiver-utils/package.json
index 1582f1854..23ec1a0db 100644
--- a/node_modules/archiver-utils/package.json
+++ b/node_modules/archiver-utils/package.json
@@ -1,6 +1,6 @@
 {
   "name": "archiver-utils",
-  "version": "2.1.0",
+  "version": "5.0.2",
   "license": "MIT",
   "description": "utility functions for archiver",
   "homepage": "https://github.com/archiverjs/archiver-utils#readme",
@@ -25,30 +25,24 @@
     "file.js"
   ],
   "engines": {
-    "node": ">= 6"
+    "node": ">= 14"
   },
   "scripts": {
     "test": "mocha --reporter dot"
   },
   "dependencies": {
-    "glob": "^7.1.4",
+    "glob": "^10.0.0",
     "graceful-fs": "^4.2.0",
+    "is-stream": "^2.0.1",
     "lazystream": "^1.0.0",
-    "lodash.defaults": "^4.2.0",
-    "lodash.difference": "^4.5.0",
-    "lodash.flatten": "^4.4.0",
-    "lodash.isplainobject": "^4.0.6",
-    "lodash.union": "^4.6.0",
+    "lodash": "^4.17.15",
     "normalize-path": "^3.0.0",
-    "readable-stream": "^2.0.0"
+    "readable-stream": "^4.0.0"
   },
   "devDependencies": {
-    "chai": "^4.2.0",
-    "mkdirp": "^0.5.0",
-    "mocha": "^5.0.0",
-    "rimraf": "^2.6.3"
-  },
-  "publishConfig": {
-    "registry": "https://registry.npmjs.org/"
+    "chai": "4.4.1",
+    "mkdirp": "3.0.1",
+    "mocha": "10.3.0",
+    "rimraf": "5.0.5"
   }
 }
diff --git a/node_modules/archiver/package.json b/node_modules/archiver/package.json
index 1ec13196c..1ffc57f75 100644
--- a/node_modules/archiver/package.json
+++ b/node_modules/archiver/package.json
@@ -1,6 +1,6 @@
 {
   "name": "archiver",
-  "version": "5.3.2",
+  "version": "7.0.1",
   "description": "a streaming interface for archive generation",
   "homepage": "https://github.com/archiverjs/node-archiver",
   "author": {
@@ -21,31 +21,31 @@
     "lib"
   ],
   "engines": {
-    "node": ">= 10"
+    "node": ">= 14"
   },
   "scripts": {
     "test": "mocha --reporter dot",
     "bench": "node benchmark/simple/pack-zip.js"
   },
   "dependencies": {
-    "archiver-utils": "^2.1.0",
+    "archiver-utils": "^5.0.2",
     "async": "^3.2.4",
-    "buffer-crc32": "^0.2.1",
-    "readable-stream": "^3.6.0",
+    "buffer-crc32": "^1.0.0",
+    "readable-stream": "^4.0.0",
     "readdir-glob": "^1.1.2",
-    "tar-stream": "^2.2.0",
-    "zip-stream": "^4.1.0"
+    "tar-stream": "^3.0.0",
+    "zip-stream": "^6.0.1"
   },
   "devDependencies": {
-    "archiver-jsdoc-theme": "^1.1.3",
-    "chai": "^4.3.7",
-    "jsdoc": "^3.6.4",
-    "mkdirp": "^2.1.5",
-    "mocha": "^9.0.2",
-    "rimraf": "^4.3.1",
-    "stream-bench": "^0.1.2",
-    "tar": "^6.1.13",
-    "yauzl": "^2.9.0"
+    "archiver-jsdoc-theme": "1.1.3",
+    "chai": "4.4.1",
+    "jsdoc": "4.0.2",
+    "mkdirp": "3.0.1",
+    "mocha": "10.3.0",
+    "rimraf": "5.0.5",
+    "stream-bench": "0.1.2",
+    "tar": "6.2.0",
+    "yauzl": "3.1.2"
   },
   "keywords": [
     "archive",
diff --git a/node_modules/asynckit/bench.js b/node_modules/asynckit/bench.js
deleted file mode 100644
index c612f1a55..000000000
--- a/node_modules/asynckit/bench.js
+++ /dev/null
@@ -1,76 +0,0 @@
-/* eslint no-console: "off" */
-
-var asynckit = require('./')
-  , async    = require('async')
-  , assert   = require('assert')
-  , expected = 0
-  ;
-
-var Benchmark = require('benchmark');
-var suite = new Benchmark.Suite;
-
-var source = [];
-for (var z = 1; z < 100; z++)
-{
-  source.push(z);
-  expected += z;
-}
-
-suite
-// add tests
-
-.add('async.map', function(deferred)
-{
-  var total = 0;
-
-  async.map(source,
-  function(i, cb)
-  {
-    setImmediate(function()
-    {
-      total += i;
-      cb(null, total);
-    });
-  },
-  function(err, result)
-  {
-    assert.ifError(err);
-    assert.equal(result[result.length - 1], expected);
-    deferred.resolve();
-  });
-}, {'defer': true})
-
-
-.add('asynckit.parallel', function(deferred)
-{
-  var total = 0;
-
-  asynckit.parallel(source,
-  function(i, cb)
-  {
-    setImmediate(function()
-    {
-      total += i;
-      cb(null, total);
-    });
-  },
-  function(err, result)
-  {
-    assert.ifError(err);
-    assert.equal(result[result.length - 1], expected);
-    deferred.resolve();
-  });
-}, {'defer': true})
-
-
-// add listeners
-.on('cycle', function(ev)
-{
-  console.log(String(ev.target));
-})
-.on('complete', function()
-{
-  console.log('Fastest is ' + this.filter('fastest').map('name'));
-})
-// run async
-.run({ 'async': true });
diff --git a/node_modules/asynckit/index.js b/node_modules/asynckit/index.js
deleted file mode 100644
index 455f9454e..000000000
--- a/node_modules/asynckit/index.js
+++ /dev/null
@@ -1,6 +0,0 @@
-module.exports =
-{
-  parallel      : require('./parallel.js'),
-  serial        : require('./serial.js'),
-  serialOrdered : require('./serialOrdered.js')
-};
diff --git a/node_modules/asynckit/lib/abort.js b/node_modules/asynckit/lib/abort.js
deleted file mode 100644
index 114367e5f..000000000
--- a/node_modules/asynckit/lib/abort.js
+++ /dev/null
@@ -1,29 +0,0 @@
-// API
-module.exports = abort;
-
-/**
- * Aborts leftover active jobs
- *
- * @param {object} state - current state object
- */
-function abort(state)
-{
-  Object.keys(state.jobs).forEach(clean.bind(state));
-
-  // reset leftover jobs
-  state.jobs = {};
-}
-
-/**
- * Cleans up leftover job by invoking abort function for the provided job id
- *
- * @this  state
- * @param {string|number} key - job id to abort
- */
-function clean(key)
-{
-  if (typeof this.jobs[key] == 'function')
-  {
-    this.jobs[key]();
-  }
-}
diff --git a/node_modules/asynckit/lib/async.js b/node_modules/asynckit/lib/async.js
deleted file mode 100644
index 7f1288a4c..000000000
--- a/node_modules/asynckit/lib/async.js
+++ /dev/null
@@ -1,34 +0,0 @@
-var defer = require('./defer.js');
-
-// API
-module.exports = async;
-
-/**
- * Runs provided callback asynchronously
- * even if callback itself is not
- *
- * @param   {function} callback - callback to invoke
- * @returns {function} - augmented callback
- */
-function async(callback)
-{
-  var isAsync = false;
-
-  // check if async happened
-  defer(function() { isAsync = true; });
-
-  return function async_callback(err, result)
-  {
-    if (isAsync)
-    {
-      callback(err, result);
-    }
-    else
-    {
-      defer(function nextTick_callback()
-      {
-        callback(err, result);
-      });
-    }
-  };
-}
diff --git a/node_modules/asynckit/lib/defer.js b/node_modules/asynckit/lib/defer.js
deleted file mode 100644
index b67110c7a..000000000
--- a/node_modules/asynckit/lib/defer.js
+++ /dev/null
@@ -1,26 +0,0 @@
-module.exports = defer;
-
-/**
- * Runs provided function on next iteration of the event loop
- *
- * @param {function} fn - function to run
- */
-function defer(fn)
-{
-  var nextTick = typeof setImmediate == 'function'
-    ? setImmediate
-    : (
-      typeof process == 'object' && typeof process.nextTick == 'function'
-      ? process.nextTick
-      : null
-    );
-
-  if (nextTick)
-  {
-    nextTick(fn);
-  }
-  else
-  {
-    setTimeout(fn, 0);
-  }
-}
diff --git a/node_modules/asynckit/lib/iterate.js b/node_modules/asynckit/lib/iterate.js
deleted file mode 100644
index 5d2839a59..000000000
--- a/node_modules/asynckit/lib/iterate.js
+++ /dev/null
@@ -1,75 +0,0 @@
-var async = require('./async.js')
-  , abort = require('./abort.js')
-  ;
-
-// API
-module.exports = iterate;
-
-/**
- * Iterates over each job object
- *
- * @param {array|object} list - array or object (named list) to iterate over
- * @param {function} iterator - iterator to run
- * @param {object} state - current job status
- * @param {function} callback - invoked when all elements processed
- */
-function iterate(list, iterator, state, callback)
-{
-  // store current index
-  var key = state['keyedList'] ? state['keyedList'][state.index] : state.index;
-
-  state.jobs[key] = runJob(iterator, key, list[key], function(error, output)
-  {
-    // don't repeat yourself
-    // skip secondary callbacks
-    if (!(key in state.jobs))
-    {
-      return;
-    }
-
-    // clean up jobs
-    delete state.jobs[key];
-
-    if (error)
-    {
-      // don't process rest of the results
-      // stop still active jobs
-      // and reset the list
-      abort(state);
-    }
-    else
-    {
-      state.results[key] = output;
-    }
-
-    // return salvaged results
-    callback(error, state.results);
-  });
-}
-
-/**
- * Runs iterator over provided job element
- *
- * @param   {function} iterator - iterator to invoke
- * @param   {string|number} key - key/index of the element in the list of jobs
- * @param   {mixed} item - job description
- * @param   {function} callback - invoked after iterator is done with the job
- * @returns {function|mixed} - job abort function or something else
- */
-function runJob(iterator, key, item, callback)
-{
-  var aborter;
-
-  // allow shortcut if iterator expects only two arguments
-  if (iterator.length == 2)
-  {
-    aborter = iterator(item, async(callback));
-  }
-  // otherwise go with full three arguments
-  else
-  {
-    aborter = iterator(item, key, async(callback));
-  }
-
-  return aborter;
-}
diff --git a/node_modules/asynckit/lib/readable_asynckit.js b/node_modules/asynckit/lib/readable_asynckit.js
deleted file mode 100644
index 78ad240f0..000000000
--- a/node_modules/asynckit/lib/readable_asynckit.js
+++ /dev/null
@@ -1,91 +0,0 @@
-var streamify = require('./streamify.js')
-  , defer     = require('./defer.js')
-  ;
-
-// API
-module.exports = ReadableAsyncKit;
-
-/**
- * Base constructor for all streams
- * used to hold properties/methods
- */
-function ReadableAsyncKit()
-{
-  ReadableAsyncKit.super_.apply(this, arguments);
-
-  // list of active jobs
-  this.jobs = {};
-
-  // add stream methods
-  this.destroy = destroy;
-  this._start  = _start;
-  this._read   = _read;
-}
-
-/**
- * Destroys readable stream,
- * by aborting outstanding jobs
- *
- * @returns {void}
- */
-function destroy()
-{
-  if (this.destroyed)
-  {
-    return;
-  }
-
-  this.destroyed = true;
-
-  if (typeof this.terminator == 'function')
-  {
-    this.terminator();
-  }
-}
-
-/**
- * Starts provided jobs in async manner
- *
- * @private
- */
-function _start()
-{
-  // first argument – runner function
-  var runner = arguments[0]
-    // take away first argument
-    , args   = Array.prototype.slice.call(arguments, 1)
-      // second argument - input data
-    , input  = args[0]
-      // last argument - result callback
-    , endCb  = streamify.callback.call(this, args[args.length - 1])
-    ;
-
-  args[args.length - 1] = endCb;
-  // third argument - iterator
-  args[1] = streamify.iterator.call(this, args[1]);
-
-  // allow time for proper setup
-  defer(function()
-  {
-    if (!this.destroyed)
-    {
-      this.terminator = runner.apply(null, args);
-    }
-    else
-    {
-      endCb(null, Array.isArray(input) ? [] : {});
-    }
-  }.bind(this));
-}
-
-
-/**
- * Implement _read to comply with Readable streams
- * Doesn't really make sense for flowing object mode
- *
- * @private
- */
-function _read()
-{
-
-}
diff --git a/node_modules/asynckit/lib/readable_parallel.js b/node_modules/asynckit/lib/readable_parallel.js
deleted file mode 100644
index 5d2929f7a..000000000
--- a/node_modules/asynckit/lib/readable_parallel.js
+++ /dev/null
@@ -1,25 +0,0 @@
-var parallel = require('../parallel.js');
-
-// API
-module.exports = ReadableParallel;
-
-/**
- * Streaming wrapper to `asynckit.parallel`
- *
- * @param   {array|object} list - array or object (named list) to iterate over
- * @param   {function} iterator - iterator to run
- * @param   {function} callback - invoked when all elements processed
- * @returns {stream.Readable#}
- */
-function ReadableParallel(list, iterator, callback)
-{
-  if (!(this instanceof ReadableParallel))
-  {
-    return new ReadableParallel(list, iterator, callback);
-  }
-
-  // turn on object mode
-  ReadableParallel.super_.call(this, {objectMode: true});
-
-  this._start(parallel, list, iterator, callback);
-}
diff --git a/node_modules/asynckit/lib/readable_serial.js b/node_modules/asynckit/lib/readable_serial.js
deleted file mode 100644
index 782269820..000000000
--- a/node_modules/asynckit/lib/readable_serial.js
+++ /dev/null
@@ -1,25 +0,0 @@
-var serial = require('../serial.js');
-
-// API
-module.exports = ReadableSerial;
-
-/**
- * Streaming wrapper to `asynckit.serial`
- *
- * @param   {array|object} list - array or object (named list) to iterate over
- * @param   {function} iterator - iterator to run
- * @param   {function} callback - invoked when all elements processed
- * @returns {stream.Readable#}
- */
-function ReadableSerial(list, iterator, callback)
-{
-  if (!(this instanceof ReadableSerial))
-  {
-    return new ReadableSerial(list, iterator, callback);
-  }
-
-  // turn on object mode
-  ReadableSerial.super_.call(this, {objectMode: true});
-
-  this._start(serial, list, iterator, callback);
-}
diff --git a/node_modules/asynckit/lib/readable_serial_ordered.js b/node_modules/asynckit/lib/readable_serial_ordered.js
deleted file mode 100644
index 3de89c472..000000000
--- a/node_modules/asynckit/lib/readable_serial_ordered.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var serialOrdered = require('../serialOrdered.js');
-
-// API
-module.exports = ReadableSerialOrdered;
-// expose sort helpers
-module.exports.ascending  = serialOrdered.ascending;
-module.exports.descending = serialOrdered.descending;
-
-/**
- * Streaming wrapper to `asynckit.serialOrdered`
- *
- * @param   {array|object} list - array or object (named list) to iterate over
- * @param   {function} iterator - iterator to run
- * @param   {function} sortMethod - custom sort function
- * @param   {function} callback - invoked when all elements processed
- * @returns {stream.Readable#}
- */
-function ReadableSerialOrdered(list, iterator, sortMethod, callback)
-{
-  if (!(this instanceof ReadableSerialOrdered))
-  {
-    return new ReadableSerialOrdered(list, iterator, sortMethod, callback);
-  }
-
-  // turn on object mode
-  ReadableSerialOrdered.super_.call(this, {objectMode: true});
-
-  this._start(serialOrdered, list, iterator, sortMethod, callback);
-}
diff --git a/node_modules/asynckit/lib/state.js b/node_modules/asynckit/lib/state.js
deleted file mode 100644
index cbea7ad8f..000000000
--- a/node_modules/asynckit/lib/state.js
+++ /dev/null
@@ -1,37 +0,0 @@
-// API
-module.exports = state;
-
-/**
- * Creates initial state object
- * for iteration over list
- *
- * @param   {array|object} list - list to iterate over
- * @param   {function|null} sortMethod - function to use for keys sort,
- *                                     or `null` to keep them as is
- * @returns {object} - initial state object
- */
-function state(list, sortMethod)
-{
-  var isNamedList = !Array.isArray(list)
-    , initState =
-    {
-      index    : 0,
-      keyedList: isNamedList || sortMethod ? Object.keys(list) : null,
-      jobs     : {},
-      results  : isNamedList ? {} : [],
-      size     : isNamedList ? Object.keys(list).length : list.length
-    }
-    ;
-
-  if (sortMethod)
-  {
-    // sort array keys based on it's values
-    // sort object's keys just on own merit
-    initState.keyedList.sort(isNamedList ? sortMethod : function(a, b)
-    {
-      return sortMethod(list[a], list[b]);
-    });
-  }
-
-  return initState;
-}
diff --git a/node_modules/asynckit/lib/streamify.js b/node_modules/asynckit/lib/streamify.js
deleted file mode 100644
index f56a1c92b..000000000
--- a/node_modules/asynckit/lib/streamify.js
+++ /dev/null
@@ -1,141 +0,0 @@
-var async = require('./async.js');
-
-// API
-module.exports = {
-  iterator: wrapIterator,
-  callback: wrapCallback
-};
-
-/**
- * Wraps iterators with long signature
- *
- * @this    ReadableAsyncKit#
- * @param   {function} iterator - function to wrap
- * @returns {function} - wrapped function
- */
-function wrapIterator(iterator)
-{
-  var stream = this;
-
-  return function(item, key, cb)
-  {
-    var aborter
-      , wrappedCb = async(wrapIteratorCallback.call(stream, cb, key))
-      ;
-
-    stream.jobs[key] = wrappedCb;
-
-    // it's either shortcut (item, cb)
-    if (iterator.length == 2)
-    {
-      aborter = iterator(item, wrappedCb);
-    }
-    // or long format (item, key, cb)
-    else
-    {
-      aborter = iterator(item, key, wrappedCb);
-    }
-
-    return aborter;
-  };
-}
-
-/**
- * Wraps provided callback function
- * allowing to execute snitch function before
- * real callback
- *
- * @this    ReadableAsyncKit#
- * @param   {function} callback - function to wrap
- * @returns {function} - wrapped function
- */
-function wrapCallback(callback)
-{
-  var stream = this;
-
-  var wrapped = function(error, result)
-  {
-    return finisher.call(stream, error, result, callback);
-  };
-
-  return wrapped;
-}
-
-/**
- * Wraps provided iterator callback function
- * makes sure snitch only called once,
- * but passes secondary calls to the original callback
- *
- * @this    ReadableAsyncKit#
- * @param   {function} callback - callback to wrap
- * @param   {number|string} key - iteration key
- * @returns {function} wrapped callback
- */
-function wrapIteratorCallback(callback, key)
-{
-  var stream = this;
-
-  return function(error, output)
-  {
-    // don't repeat yourself
-    if (!(key in stream.jobs))
-    {
-      callback(error, output);
-      return;
-    }
-
-    // clean up jobs
-    delete stream.jobs[key];
-
-    return streamer.call(stream, error, {key: key, value: output}, callback);
-  };
-}
-
-/**
- * Stream wrapper for iterator callback
- *
- * @this  ReadableAsyncKit#
- * @param {mixed} error - error response
- * @param {mixed} output - iterator output
- * @param {function} callback - callback that expects iterator results
- */
-function streamer(error, output, callback)
-{
-  if (error && !this.error)
-  {
-    this.error = error;
-    this.pause();
-    this.emit('error', error);
-    // send back value only, as expected
-    callback(error, output && output.value);
-    return;
-  }
-
-  // stream stuff
-  this.push(output);
-
-  // back to original track
-  // send back value only, as expected
-  callback(error, output && output.value);
-}
-
-/**
- * Stream wrapper for finishing callback
- *
- * @this  ReadableAsyncKit#
- * @param {mixed} error - error response
- * @param {mixed} output - iterator output
- * @param {function} callback - callback that expects final results
- */
-function finisher(error, output, callback)
-{
-  // signal end of the stream
-  // only for successfully finished streams
-  if (!error)
-  {
-    this.push(null);
-  }
-
-  // back to original track
-  callback(error, output);
-}
diff --git a/node_modules/asynckit/lib/terminator.js b/node_modules/asynckit/lib/terminator.js
deleted file mode 100644
index d6eb99219..000000000
--- a/node_modules/asynckit/lib/terminator.js
+++ /dev/null
@@ -1,29 +0,0 @@
-var abort = require('./abort.js')
-  , async = require('./async.js')
-  ;
-
-// API
-module.exports = terminator;
-
-/**
- * Terminates jobs in the attached state context
- *
- * @this  AsyncKitState#
- * @param {function} callback - final callback to invoke after termination
- */
-function terminator(callback)
-{
-  if (!Object.keys(this.jobs).length)
-  {
-    return;
-  }
-
-  // fast forward iteration index
-  this.index = this.size;
-
-  // abort jobs
-  abort(this);
-
-  // send back results we have so far
-  async(callback)(null, this.results);
-}
diff --git a/node_modules/asynckit/package.json b/node_modules/asynckit/package.json
deleted file mode 100644
index 51147d656..000000000
--- a/node_modules/asynckit/package.json
+++ /dev/null
@@ -1,63 +0,0 @@
-{
-  "name": "asynckit",
-  "version": "0.4.0",
-  "description": "Minimal async jobs utility library, with streams support",
-  "main": "index.js",
-  "scripts": {
-    "clean": "rimraf coverage",
-    "lint": "eslint *.js lib/*.js test/*.js",
-    "test": "istanbul cover --reporter=json tape -- 'test/test-*.js' | tap-spec",
-    "win-test": "tape test/test-*.js",
-    "browser": "browserify -t browserify-istanbul test/lib/browserify_adjustment.js test/test-*.js | obake --coverage | tap-spec",
-    "report": "istanbul report",
-    "size": "browserify index.js | size-table asynckit",
-    "debug": "tape test/test-*.js"
-  },
-  "pre-commit": [
-    "clean",
-    "lint",
-    "test",
-    "browser",
-    "report",
-    "size"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/alexindigo/asynckit.git"
-  },
-  "keywords": [
-    "async",
-    "jobs",
-    "parallel",
-    "serial",
-    "iterator",
-    "array",
-    "object",
-    "stream",
-    "destroy",
-    "terminate",
-    "abort"
-  ],
-  "author": "Alex Indigo ",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/alexindigo/asynckit/issues"
-  },
-  "homepage": "https://github.com/alexindigo/asynckit#readme",
-  "devDependencies": {
-    "browserify": "^13.0.0",
-    "browserify-istanbul": "^2.0.0",
-    "coveralls": "^2.11.9",
-    "eslint": "^2.9.0",
-    "istanbul": "^0.4.3",
-    "obake": "^0.1.2",
-    "phantomjs-prebuilt": "^2.1.7",
-    "pre-commit": "^1.1.3",
-    "reamde": "^1.1.0",
-    "rimraf": "^2.5.2",
-    "size-table": "^0.2.0",
-    "tap-spec": "^4.1.1",
-    "tape": "^4.5.1"
-  },
-  "dependencies": {}
-}
diff --git a/node_modules/asynckit/parallel.js b/node_modules/asynckit/parallel.js
deleted file mode 100644
index 3c50344d8..000000000
--- a/node_modules/asynckit/parallel.js
+++ /dev/null
@@ -1,43 +0,0 @@
-var iterate    = require('./lib/iterate.js')
-  , initState  = require('./lib/state.js')
-  , terminator = require('./lib/terminator.js')
-  ;
-
-// Public API
-module.exports = parallel;
-
-/**
- * Runs iterator over provided array elements in parallel
- *
- * @param   {array|object} list - array or object (named list) to iterate over
- * @param   {function} iterator - iterator to run
- * @param   {function} callback - invoked when all elements processed
- * @returns {function} - jobs terminator
- */
-function parallel(list, iterator, callback)
-{
-  var state = initState(list);
-
-  while (state.index < (state['keyedList'] || list).length)
-  {
-    iterate(list, iterator, state, function(error, result)
-    {
-      if (error)
-      {
-        callback(error, result);
-        return;
-      }
-
-      // looks like it's the last one
-      if (Object.keys(state.jobs).length === 0)
-      {
-        callback(null, state.results);
-        return;
-      }
-    });
-
-    state.index++;
-  }
-
-  return terminator.bind(state, callback);
-}
diff --git a/node_modules/asynckit/serial.js b/node_modules/asynckit/serial.js
deleted file mode 100644
index 6cd949a67..000000000
--- a/node_modules/asynckit/serial.js
+++ /dev/null
@@ -1,17 +0,0 @@
-var serialOrdered = require('./serialOrdered.js');
-
-// Public API
-module.exports = serial;
-
-/**
- * Runs iterator over provided array elements in series
- *
- * @param   {array|object} list - array or object (named list) to iterate over
- * @param   {function} iterator - iterator to run
- * @param   {function} callback - invoked when all elements processed
- * @returns {function} - jobs terminator
- */
-function serial(list, iterator, callback)
-{
-  return serialOrdered(list, iterator, null, callback);
-}
diff --git a/node_modules/asynckit/serialOrdered.js b/node_modules/asynckit/serialOrdered.js
deleted file mode 100644
index 607eafea5..000000000
--- a/node_modules/asynckit/serialOrdered.js
+++ /dev/null
@@ -1,75 +0,0 @@
-var iterate    = require('./lib/iterate.js')
-  , initState  = require('./lib/state.js')
-  , terminator = require('./lib/terminator.js')
-  ;
-
-// Public API
-module.exports = serialOrdered;
-// sorting helpers
-module.exports.ascending  = ascending;
-module.exports.descending = descending;
-
-/**
- * Runs iterator over provided sorted array elements in series
- *
- * @param   {array|object} list - array or object (named list) to iterate over
- * @param   {function} iterator - iterator to run
- * @param   {function} sortMethod - custom sort function
- * @param   {function} callback - invoked when all elements processed
- * @returns {function} - jobs terminator
- */
-function serialOrdered(list, iterator, sortMethod, callback)
-{
-  var state = initState(list, sortMethod);
-
-  iterate(list, iterator, state, function iteratorHandler(error, result)
-  {
-    if (error)
-    {
-      callback(error, result);
-      return;
-    }
-
-    state.index++;
-
-    // are we there yet?
-    if (state.index < (state['keyedList'] || list).length)
-    {
-      iterate(list, iterator, state, iteratorHandler);
-      return;
-    }
-
-    // done here
-    callback(null, state.results);
-  });
-
-  return terminator.bind(state, callback);
-}
-
-/*
- * -- Sort methods
- */
-
-/**
- * sort helper to sort array elements in ascending order
- *
- * @param   {mixed} a - an item to compare
- * @param   {mixed} b - an item to compare
- * @returns {number} - comparison result
- */
-function ascending(a, b)
-{
-  return a < b ? -1 : a > b ? 1 : 0;
-}
-
-/**
- * sort helper to sort array elements in descending order
- *
- * @param   {mixed} a - an item to compare
- * @param   {mixed} b - an item to compare
- * @returns {number} - comparison result
- */
-function descending(a, b)
-{
-  return -1 * ascending(a, b);
-}
diff --git a/node_modules/asynckit/stream.js b/node_modules/asynckit/stream.js
deleted file mode 100644
index d43465f90..000000000
--- a/node_modules/asynckit/stream.js
+++ /dev/null
@@ -1,21 +0,0 @@
-var inherits              = require('util').inherits
-  , Readable              = require('stream').Readable
-  , ReadableAsyncKit      = require('./lib/readable_asynckit.js')
-  , ReadableParallel      = require('./lib/readable_parallel.js')
-  , ReadableSerial        = require('./lib/readable_serial.js')
-  , ReadableSerialOrdered = require('./lib/readable_serial_ordered.js')
-  ;
-
-// API
-module.exports =
-{
-  parallel      : ReadableParallel,
-  serial        : ReadableSerial,
-  serialOrdered : ReadableSerialOrdered, 
-};
-
-inherits(ReadableAsyncKit, Readable);
-
-inherits(ReadableParallel, ReadableAsyncKit);
-inherits(ReadableSerial, ReadableAsyncKit);
-inherits(ReadableSerialOrdered, ReadableAsyncKit);
diff --git a/node_modules/axe-core/axe.js b/node_modules/axe-core/axe.js
index 784d214d2..2e9c907ce 100644
--- a/node_modules/axe-core/axe.js
+++ b/node_modules/axe-core/axe.js
@@ -1,4 +1,4 @@
-/*! axe v4.9.0
+/*! axe v4.9.1
  * Copyright (c) 2015 - 2024 Deque Systems, Inc.
  *
  * Your use of this Source Code Form is subject to the terms of the Mozilla Public
@@ -22,7 +22,7 @@
     }, _typeof(o);
   }
   var axe = axe || {};
-  axe.version = '4.9.0';
+  axe.version = '4.9.1';
   if (typeof define === 'function' && define.amd) {
     define('axe-core', [], function() {
       return axe;
@@ -67,57 +67,6 @@
     }
     return obj;
   }
-  function _classPrivateMethodInitSpec(obj, privateSet) {
-    _checkPrivateRedeclaration(obj, privateSet);
-    privateSet.add(obj);
-  }
-  function _classPrivateFieldInitSpec(obj, privateMap, value) {
-    _checkPrivateRedeclaration(obj, privateMap);
-    privateMap.set(obj, value);
-  }
-  function _checkPrivateRedeclaration(obj, privateCollection) {
-    if (privateCollection.has(obj)) {
-      throw new TypeError('Cannot initialize the same private elements twice on an object');
-    }
-  }
-  function _classPrivateMethodGet(receiver, privateSet, fn) {
-    _assertClassBrand(privateSet, receiver);
-    return fn;
-  }
-  function _classPrivateFieldSet(receiver, privateMap, value) {
-    var descriptor = _classPrivateFieldGet2(privateMap, receiver);
-    _classApplyDescriptorSet(receiver, descriptor, value);
-    return value;
-  }
-  function _classApplyDescriptorSet(receiver, descriptor, value) {
-    if (descriptor.set) {
-      descriptor.set.call(receiver, value);
-    } else {
-      if (!descriptor.writable) {
-        throw new TypeError('attempted to set read only private field');
-      }
-      descriptor.value = value;
-    }
-  }
-  function _classPrivateFieldGet(receiver, privateMap) {
-    var descriptor = _classPrivateFieldGet2(privateMap, receiver);
-    return _classApplyDescriptorGet(receiver, descriptor);
-  }
-  function _classPrivateFieldGet2(s, a) {
-    return s.get(_assertClassBrand(s, a));
-  }
-  function _assertClassBrand(e, t, n) {
-    if ('function' == typeof e ? e === t : e.has(t)) {
-      return arguments.length < 3 ? t : n;
-    }
-    throw new TypeError('Private element is not present on this object');
-  }
-  function _classApplyDescriptorGet(receiver, descriptor) {
-    if (descriptor.get) {
-      return descriptor.get.call(receiver);
-    }
-    return descriptor.value;
-  }
   function _construct(t, e, r) {
     if (_isNativeReflectConstruct()) {
       return Reflect.construct.apply(null, arguments);
@@ -183,6 +132,31 @@
     };
     return _setPrototypeOf(o, p);
   }
+  function _classPrivateFieldInitSpec(obj, privateMap, value) {
+    _checkPrivateRedeclaration(obj, privateMap);
+    privateMap.set(obj, value);
+  }
+  function _classPrivateMethodInitSpec(obj, privateSet) {
+    _checkPrivateRedeclaration(obj, privateSet);
+    privateSet.add(obj);
+  }
+  function _checkPrivateRedeclaration(obj, privateCollection) {
+    if (privateCollection.has(obj)) {
+      throw new TypeError('Cannot initialize the same private elements twice on an object');
+    }
+  }
+  function _classPrivateFieldGet(s, a) {
+    return s.get(_assertClassBrand(s, a));
+  }
+  function _classPrivateFieldSet(s, a, r) {
+    return s.set(_assertClassBrand(s, a), r), r;
+  }
+  function _assertClassBrand(e, t, n) {
+    if ('function' == typeof e ? e === t : e.has(t)) {
+      return arguments.length < 3 ? t : n;
+    }
+    throw new TypeError('Private element is not present on this object');
+  }
   function _objectWithoutProperties(source, excluded) {
     if (source == null) {
       return {};
@@ -320,7 +294,7 @@
   }
   function _toPropertyKey(t) {
     var i = _toPrimitive(t, 'string');
-    return 'symbol' == _typeof(i) ? i : String(i);
+    return 'symbol' == _typeof(i) ? i : i + '';
   }
   function _toPrimitive(t, r) {
     if ('object' != _typeof(t) || !t) {
@@ -428,7 +402,7 @@
       return o && 'function' == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? 'symbol' : typeof o;
     }, _typeof(o);
   }
-  (function(_globalThis$process, _r, _g, _b, _red, _green, _blue, _add) {
+  (function(_Class_brand, _path, _space, _r, _g, _b, _red, _green, _blue, _Class3_brand) {
     var __create = Object.create;
     var __defProp = Object.defineProperty;
     var __getProtoOf = Object.getPrototypeOf;
@@ -4144,7 +4118,7 @@
       var min = Math.min;
       var pow = Math.pow;
       var round = Math.round;
-      function clamp4(v, minimum, max2) {
+      function clamp3(v, minimum, max2) {
         return v < minimum ? minimum : v > max2 ? max2 : v;
       }
       var getOwnPropNames = Object.getOwnPropertyNames || function(o) {
@@ -4544,8 +4518,8 @@
             if (end < 0) {
               end = this.length + end;
             }
-            start = clamp4(start, 0, this.length);
-            end = clamp4(end, 0, this.length);
+            start = clamp3(start, 0, this.length);
+            end = clamp3(end, 0, this.length);
             var len = end - start;
             if (len < 0) {
               len = 0;
@@ -5688,277 +5662,1135 @@
       var parent = require_values2();
       module.exports = parent;
     });
-    var require_doT = __commonJS(function(exports, module) {
-      (function() {
-        'use strict';
-        var doT3 = {
-          name: 'doT',
-          version: '1.1.1',
-          templateSettings: {
-            evaluate: /\{\{([\s\S]+?(\}?)+)\}\}/g,
-            interpolate: /\{\{=([\s\S]+?)\}\}/g,
-            encode: /\{\{!([\s\S]+?)\}\}/g,
-            use: /\{\{#([\s\S]+?)\}\}/g,
-            useParams: /(^|[^\w$])def(?:\.|\[[\'\"])([\w$\.]+)(?:[\'\"]\])?\s*\:\s*([\w$\.]+|\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})/g,
-            define: /\{\{##\s*([\w\.$]+)\s*(\:|=)([\s\S]+?)#\}\}/g,
-            defineParams: /^\s*([\w$]+):([\s\S]+)/,
-            conditional: /\{\{\?(\?)?\s*([\s\S]*?)\s*\}\}/g,
-            iterate: /\{\{~\s*(?:\}\}|([\s\S]+?)\s*\:\s*([\w$]+)\s*(?:\:\s*([\w$]+))?\s*\}\})/g,
-            varname: 'it',
-            strip: true,
-            append: true,
-            selfcontained: false,
-            doNotSkipEncoded: false
-          },
-          template: void 0,
-          compile: void 0,
-          log: true
+    var require_to_string_tag_support = __commonJS(function(exports, module) {
+      'use strict';
+      var wellKnownSymbol = require_well_known_symbol();
+      var TO_STRING_TAG = wellKnownSymbol('toStringTag');
+      var test = {};
+      test[TO_STRING_TAG] = 'z';
+      module.exports = String(test) === '[object z]';
+    });
+    var require_classof = __commonJS(function(exports, module) {
+      'use strict';
+      var TO_STRING_TAG_SUPPORT = require_to_string_tag_support();
+      var isCallable = require_is_callable2();
+      var classofRaw = require_classof_raw();
+      var wellKnownSymbol = require_well_known_symbol();
+      var TO_STRING_TAG = wellKnownSymbol('toStringTag');
+      var $Object = Object;
+      var CORRECT_ARGUMENTS = classofRaw(function() {
+        return arguments;
+      }()) === 'Arguments';
+      var tryGet = function tryGet(it, key) {
+        try {
+          return it[key];
+        } catch (error) {}
+      };
+      module.exports = TO_STRING_TAG_SUPPORT ? classofRaw : function(it) {
+        var O, tag, result;
+        return it === void 0 ? 'Undefined' : it === null ? 'Null' : typeof (tag = tryGet(O = $Object(it), TO_STRING_TAG)) == 'string' ? tag : CORRECT_ARGUMENTS ? classofRaw(O) : (result = classofRaw(O)) === 'Object' && isCallable(O.callee) ? 'Arguments' : result;
+      };
+    });
+    var require_to_string = __commonJS(function(exports, module) {
+      'use strict';
+      var classof = require_classof();
+      var $String = String;
+      module.exports = function(argument) {
+        if (classof(argument) === 'Symbol') {
+          throw new TypeError('Cannot convert a Symbol value to a string');
+        }
+        return $String(argument);
+      };
+    });
+    var require_string_multibyte = __commonJS(function(exports, module) {
+      'use strict';
+      var uncurryThis = require_function_uncurry_this();
+      var toIntegerOrInfinity = require_to_integer_or_infinity();
+      var toString = require_to_string();
+      var requireObjectCoercible = require_require_object_coercible();
+      var charAt = uncurryThis(''.charAt);
+      var charCodeAt = uncurryThis(''.charCodeAt);
+      var stringSlice = uncurryThis(''.slice);
+      var createMethod = function createMethod(CONVERT_TO_STRING) {
+        return function($this, pos) {
+          var S = toString(requireObjectCoercible($this));
+          var position = toIntegerOrInfinity(pos);
+          var size = S.length;
+          var first, second;
+          if (position < 0 || position >= size) {
+            return CONVERT_TO_STRING ? '' : void 0;
+          }
+          first = charCodeAt(S, position);
+          return first < 55296 || first > 56319 || position + 1 === size || (second = charCodeAt(S, position + 1)) < 56320 || second > 57343 ? CONVERT_TO_STRING ? charAt(S, position) : first : CONVERT_TO_STRING ? stringSlice(S, position, position + 2) : (first - 55296 << 10) + (second - 56320) + 65536;
         };
-        (function() {
-          if ((typeof globalThis === 'undefined' ? 'undefined' : _typeof(globalThis)) === 'object') {
-            return;
-          }
-          try {
-            Object.defineProperty(Object.prototype, '__magic__', {
-              get: function get() {
-                return this;
-              },
-              configurable: true
-            });
-            __magic__.globalThis = __magic__;
-            delete Object.prototype.__magic__;
-          } catch (e) {
-            window.globalThis = function() {
-              if (typeof self !== 'undefined') {
-                return self;
-              }
-              if (typeof window !== 'undefined') {
-                return window;
-              }
-              if (typeof global !== 'undefined') {
-                return global;
-              }
-              if (typeof this !== 'undefined') {
-                return this;
-              }
-              throw new Error('Unable to locate global `this`');
-            }();
+      };
+      module.exports = {
+        codeAt: createMethod(false),
+        charAt: createMethod(true)
+      };
+    });
+    var require_weak_map_basic_detection = __commonJS(function(exports, module) {
+      'use strict';
+      var global2 = require_global();
+      var isCallable = require_is_callable2();
+      var WeakMap2 = global2.WeakMap;
+      module.exports = isCallable(WeakMap2) && /native code/.test(String(WeakMap2));
+    });
+    var require_internal_state = __commonJS(function(exports, module) {
+      'use strict';
+      var NATIVE_WEAK_MAP = require_weak_map_basic_detection();
+      var global2 = require_global();
+      var isObject = require_is_object2();
+      var createNonEnumerableProperty = require_create_non_enumerable_property();
+      var hasOwn2 = require_has_own_property();
+      var shared = require_shared_store();
+      var sharedKey = require_shared_key();
+      var hiddenKeys = require_hidden_keys();
+      var OBJECT_ALREADY_INITIALIZED = 'Object already initialized';
+      var TypeError2 = global2.TypeError;
+      var WeakMap2 = global2.WeakMap;
+      var set2;
+      var get2;
+      var has;
+      var enforce = function enforce(it) {
+        return has(it) ? get2(it) : set2(it, {});
+      };
+      var getterFor = function getterFor(TYPE) {
+        return function(it) {
+          var state;
+          if (!isObject(it) || (state = get2(it)).type !== TYPE) {
+            throw new TypeError2('Incompatible receiver, ' + TYPE + ' required');
           }
-        })();
-        doT3.encodeHTMLSource = function(doNotSkipEncoded) {
-          var encodeHTMLRules = {
-            '&': '&',
-            '<': '<',
-            '>': '>',
-            '"': '"',
-            '\'': ''',
-            '/': '/'
-          }, matchHTML = doNotSkipEncoded ? /[&<>"'\/]/g : /&(?!#?\w+;)|<|>|"|'|\//g;
-          return function(code) {
-            return code ? code.toString().replace(matchHTML, function(m3) {
-              return encodeHTMLRules[m3] || m3;
-            }) : '';
-          };
+          return state;
         };
-        if (typeof module !== 'undefined' && module.exports) {
-          module.exports = doT3;
-        } else if (typeof define === 'function' && define.amd) {
-          define(function() {
-            return doT3;
-          });
+      };
+      if (NATIVE_WEAK_MAP || shared.state) {
+        store = shared.state || (shared.state = new WeakMap2());
+        store.get = store.get;
+        store.has = store.has;
+        store.set = store.set;
+        set2 = function set2(it, metadata) {
+          if (store.has(it)) {
+            throw new TypeError2(OBJECT_ALREADY_INITIALIZED);
+          }
+          metadata.facade = it;
+          store.set(it, metadata);
+          return metadata;
+        };
+        get2 = function get2(it) {
+          return store.get(it) || {};
+        };
+        has = function has(it) {
+          return store.has(it);
+        };
+      } else {
+        STATE = sharedKey('state');
+        hiddenKeys[STATE] = true;
+        set2 = function set2(it, metadata) {
+          if (hasOwn2(it, STATE)) {
+            throw new TypeError2(OBJECT_ALREADY_INITIALIZED);
+          }
+          metadata.facade = it;
+          createNonEnumerableProperty(it, STATE, metadata);
+          return metadata;
+        };
+        get2 = function get2(it) {
+          return hasOwn2(it, STATE) ? it[STATE] : {};
+        };
+        has = function has(it) {
+          return hasOwn2(it, STATE);
+        };
+      }
+      var store;
+      var STATE;
+      module.exports = {
+        set: set2,
+        get: get2,
+        has: has,
+        enforce: enforce,
+        getterFor: getterFor
+      };
+    });
+    var require_function_name = __commonJS(function(exports, module) {
+      'use strict';
+      var DESCRIPTORS = require_descriptors();
+      var hasOwn2 = require_has_own_property();
+      var FunctionPrototype = Function.prototype;
+      var getDescriptor = DESCRIPTORS && Object.getOwnPropertyDescriptor;
+      var EXISTS = hasOwn2(FunctionPrototype, 'name');
+      var PROPER = EXISTS && function something() {}.name === 'something';
+      var CONFIGURABLE = EXISTS && (!DESCRIPTORS || DESCRIPTORS && getDescriptor(FunctionPrototype, 'name').configurable);
+      module.exports = {
+        EXISTS: EXISTS,
+        PROPER: PROPER,
+        CONFIGURABLE: CONFIGURABLE
+      };
+    });
+    var require_object_define_properties = __commonJS(function(exports) {
+      'use strict';
+      var DESCRIPTORS = require_descriptors();
+      var V8_PROTOTYPE_DEFINE_BUG = require_v8_prototype_define_bug();
+      var definePropertyModule = require_object_define_property();
+      var anObject = require_an_object();
+      var toIndexedObject = require_to_indexed_object();
+      var objectKeys = require_object_keys();
+      exports.f = DESCRIPTORS && !V8_PROTOTYPE_DEFINE_BUG ? Object.defineProperties : function defineProperties(O, Properties) {
+        anObject(O);
+        var props = toIndexedObject(Properties);
+        var keys = objectKeys(Properties);
+        var length = keys.length;
+        var index = 0;
+        var key;
+        while (length > index) {
+          definePropertyModule.f(O, key = keys[index++], props[key]);
+        }
+        return O;
+      };
+    });
+    var require_html = __commonJS(function(exports, module) {
+      'use strict';
+      var getBuiltIn = require_get_built_in();
+      module.exports = getBuiltIn('document', 'documentElement');
+    });
+    var require_object_create = __commonJS(function(exports, module) {
+      'use strict';
+      var anObject = require_an_object();
+      var definePropertiesModule = require_object_define_properties();
+      var enumBugKeys = require_enum_bug_keys();
+      var hiddenKeys = require_hidden_keys();
+      var html = require_html();
+      var documentCreateElement = require_document_create_element();
+      var sharedKey = require_shared_key();
+      var GT = '>';
+      var LT = '<';
+      var PROTOTYPE = 'prototype';
+      var SCRIPT = 'script';
+      var IE_PROTO = sharedKey('IE_PROTO');
+      var EmptyConstructor = function EmptyConstructor() {};
+      var scriptTag = function scriptTag(content) {
+        return LT + SCRIPT + GT + content + LT + '/' + SCRIPT + GT;
+      };
+      var NullProtoObjectViaActiveX = function NullProtoObjectViaActiveX(activeXDocument2) {
+        activeXDocument2.write(scriptTag(''));
+        activeXDocument2.close();
+        var temp = activeXDocument2.parentWindow.Object;
+        activeXDocument2 = null;
+        return temp;
+      };
+      var NullProtoObjectViaIFrame = function NullProtoObjectViaIFrame() {
+        var iframe = documentCreateElement('iframe');
+        var JS = 'java' + SCRIPT + ':';
+        var iframeDocument;
+        iframe.style.display = 'none';
+        html.appendChild(iframe);
+        iframe.src = String(JS);
+        iframeDocument = iframe.contentWindow.document;
+        iframeDocument.open();
+        iframeDocument.write(scriptTag('document.F=Object'));
+        iframeDocument.close();
+        return iframeDocument.F;
+      };
+      var activeXDocument;
+      var _NullProtoObject = function NullProtoObject() {
+        try {
+          activeXDocument = new ActiveXObject('htmlfile');
+        } catch (error) {}
+        _NullProtoObject = typeof document != 'undefined' ? document.domain && activeXDocument ? NullProtoObjectViaActiveX(activeXDocument) : NullProtoObjectViaIFrame() : NullProtoObjectViaActiveX(activeXDocument);
+        var length = enumBugKeys.length;
+        while (length--) {
+          delete _NullProtoObject[PROTOTYPE][enumBugKeys[length]];
+        }
+        return _NullProtoObject();
+      };
+      hiddenKeys[IE_PROTO] = true;
+      module.exports = Object.create || function create(O, Properties) {
+        var result;
+        if (O !== null) {
+          EmptyConstructor[PROTOTYPE] = anObject(O);
+          result = new EmptyConstructor();
+          EmptyConstructor[PROTOTYPE] = null;
+          result[IE_PROTO] = O;
         } else {
-          globalThis.doT = doT3;
+          result = _NullProtoObject();
         }
-        var startend = {
-          append: {
-            start: '\'+(',
-            end: ')+\'',
-            startencode: '\'+encodeHTML('
-          },
-          split: {
-            start: '\';out+=(',
-            end: ');out+=\'',
-            startencode: '\';out+=encodeHTML('
+        return Properties === void 0 ? result : definePropertiesModule.f(result, Properties);
+      };
+    });
+    var require_define_built_in = __commonJS(function(exports, module) {
+      'use strict';
+      var createNonEnumerableProperty = require_create_non_enumerable_property();
+      module.exports = function(target, key, value, options) {
+        if (options && options.enumerable) {
+          target[key] = value;
+        } else {
+          createNonEnumerableProperty(target, key, value);
+        }
+        return target;
+      };
+    });
+    var require_iterators_core = __commonJS(function(exports, module) {
+      'use strict';
+      var fails = require_fails();
+      var isCallable = require_is_callable2();
+      var isObject = require_is_object2();
+      var create = require_object_create();
+      var getPrototypeOf = require_object_get_prototype_of();
+      var defineBuiltIn = require_define_built_in();
+      var wellKnownSymbol = require_well_known_symbol();
+      var IS_PURE = require_is_pure();
+      var ITERATOR = wellKnownSymbol('iterator');
+      var BUGGY_SAFARI_ITERATORS = false;
+      var IteratorPrototype;
+      var PrototypeOfArrayIteratorPrototype;
+      var arrayIterator;
+      if ([].keys) {
+        arrayIterator = [].keys();
+        if (!('next' in arrayIterator)) {
+          BUGGY_SAFARI_ITERATORS = true;
+        } else {
+          PrototypeOfArrayIteratorPrototype = getPrototypeOf(getPrototypeOf(arrayIterator));
+          if (PrototypeOfArrayIteratorPrototype !== Object.prototype) {
+            IteratorPrototype = PrototypeOfArrayIteratorPrototype;
           }
-        }, skip = /$^/;
-        function resolveDefs(c4, block, def) {
-          return (typeof block === 'string' ? block : block.toString()).replace(c4.define || skip, function(m3, code, assign, value) {
-            if (code.indexOf('def.') === 0) {
-              code = code.substring(4);
-            }
-            if (!(code in def)) {
-              if (assign === ':') {
-                if (c4.defineParams) {
-                  value.replace(c4.defineParams, function(m4, param, v) {
-                    def[code] = {
-                      arg: param,
-                      text: v
-                    };
-                  });
-                }
-                if (!(code in def)) {
-                  def[code] = value;
-                }
-              } else {
-                new Function('def', 'def[\'' + code + '\']=' + value)(def);
-              }
-            }
-            return '';
-          }).replace(c4.use || skip, function(m3, code) {
-            if (c4.useParams) {
-              code = code.replace(c4.useParams, function(m4, s, d2, param) {
-                if (def[d2] && def[d2].arg && param) {
-                  var rw = (d2 + ':' + param).replace(/'|\\/g, '_');
-                  def.__exp = def.__exp || {};
-                  def.__exp[rw] = def[d2].text.replace(new RegExp('(^|[^\\w$])' + def[d2].arg + '([^\\w$])', 'g'), '$1' + param + '$2');
-                  return s + 'def.__exp[\'' + rw + '\']';
-                }
-              });
-            }
-            var v = new Function('def', 'return ' + code)(def);
-            return v ? resolveDefs(c4, v, def) : v;
-          });
         }
-        function unescape(code) {
-          return code.replace(/\\('|\\)/g, '$1').replace(/[\r\t\n]/g, ' ');
+      }
+      var NEW_ITERATOR_PROTOTYPE = !isObject(IteratorPrototype) || fails(function() {
+        var test = {};
+        return IteratorPrototype[ITERATOR].call(test) !== test;
+      });
+      if (NEW_ITERATOR_PROTOTYPE) {
+        IteratorPrototype = {};
+      } else if (IS_PURE) {
+        IteratorPrototype = create(IteratorPrototype);
+      }
+      if (!isCallable(IteratorPrototype[ITERATOR])) {
+        defineBuiltIn(IteratorPrototype, ITERATOR, function() {
+          return this;
+        });
+      }
+      module.exports = {
+        IteratorPrototype: IteratorPrototype,
+        BUGGY_SAFARI_ITERATORS: BUGGY_SAFARI_ITERATORS
+      };
+    });
+    var require_object_to_string = __commonJS(function(exports, module) {
+      'use strict';
+      var TO_STRING_TAG_SUPPORT = require_to_string_tag_support();
+      var classof = require_classof();
+      module.exports = TO_STRING_TAG_SUPPORT ? {}.toString : function toString() {
+        return '[object ' + classof(this) + ']';
+      };
+    });
+    var require_set_to_string_tag = __commonJS(function(exports, module) {
+      'use strict';
+      var TO_STRING_TAG_SUPPORT = require_to_string_tag_support();
+      var defineProperty = require_object_define_property().f;
+      var createNonEnumerableProperty = require_create_non_enumerable_property();
+      var hasOwn2 = require_has_own_property();
+      var toString = require_object_to_string();
+      var wellKnownSymbol = require_well_known_symbol();
+      var TO_STRING_TAG = wellKnownSymbol('toStringTag');
+      module.exports = function(it, TAG, STATIC, SET_METHOD) {
+        if (it) {
+          var target = STATIC ? it : it.prototype;
+          if (!hasOwn2(target, TO_STRING_TAG)) {
+            defineProperty(target, TO_STRING_TAG, {
+              configurable: true,
+              value: TAG
+            });
+          }
+          if (SET_METHOD && !TO_STRING_TAG_SUPPORT) {
+            createNonEnumerableProperty(target, 'toString', toString);
+          }
         }
-        doT3.template = function(tmpl, c4, def) {
-          c4 = c4 || doT3.templateSettings;
-          var cse = c4.append ? startend.append : startend.split, needhtmlencode, sid = 0, indv, str = c4.use || c4.define ? resolveDefs(c4, tmpl, def || {}) : tmpl;
-          str = ('var out=\'' + (c4.strip ? str.replace(/(^|\r|\n)\t* +| +\t*(\r|\n|$)/g, ' ').replace(/\r|\n|\t|\/\*[\s\S]*?\*\//g, '') : str).replace(/'|\\/g, '\\$&').replace(c4.interpolate || skip, function(m3, code) {
-            return cse.start + unescape(code) + cse.end;
-          }).replace(c4.encode || skip, function(m3, code) {
-            needhtmlencode = true;
-            return cse.startencode + unescape(code) + cse.end;
-          }).replace(c4.conditional || skip, function(m3, elsecase, code) {
-            return elsecase ? code ? '\';}else if(' + unescape(code) + '){out+=\'' : '\';}else{out+=\'' : code ? '\';if(' + unescape(code) + '){out+=\'' : '\';}out+=\'';
-          }).replace(c4.iterate || skip, function(m3, iterate, vname, iname) {
-            if (!iterate) {
-              return '\';} } out+=\'';
+      };
+    });
+    var require_iterators = __commonJS(function(exports, module) {
+      'use strict';
+      module.exports = {};
+    });
+    var require_iterator_create_constructor = __commonJS(function(exports, module) {
+      'use strict';
+      var IteratorPrototype = require_iterators_core().IteratorPrototype;
+      var create = require_object_create();
+      var createPropertyDescriptor = require_create_property_descriptor();
+      var setToStringTag = require_set_to_string_tag();
+      var Iterators = require_iterators();
+      var returnThis = function returnThis() {
+        return this;
+      };
+      module.exports = function(IteratorConstructor, NAME, next, ENUMERABLE_NEXT) {
+        var TO_STRING_TAG = NAME + ' Iterator';
+        IteratorConstructor.prototype = create(IteratorPrototype, {
+          next: createPropertyDescriptor(+!ENUMERABLE_NEXT, next)
+        });
+        setToStringTag(IteratorConstructor, TO_STRING_TAG, false, true);
+        Iterators[TO_STRING_TAG] = returnThis;
+        return IteratorConstructor;
+      };
+    });
+    var require_function_uncurry_this_accessor = __commonJS(function(exports, module) {
+      'use strict';
+      var uncurryThis = require_function_uncurry_this();
+      var aCallable = require_a_callable();
+      module.exports = function(object, key, method) {
+        try {
+          return uncurryThis(aCallable(Object.getOwnPropertyDescriptor(object, key)[method]));
+        } catch (error) {}
+      };
+    });
+    var require_a_possible_prototype = __commonJS(function(exports, module) {
+      'use strict';
+      var isCallable = require_is_callable2();
+      var $String = String;
+      var $TypeError = TypeError;
+      module.exports = function(argument) {
+        if (_typeof(argument) == 'object' || isCallable(argument)) {
+          return argument;
+        }
+        throw new $TypeError('Can\'t set ' + $String(argument) + ' as a prototype');
+      };
+    });
+    var require_object_set_prototype_of = __commonJS(function(exports, module) {
+      'use strict';
+      var uncurryThisAccessor = require_function_uncurry_this_accessor();
+      var anObject = require_an_object();
+      var aPossiblePrototype = require_a_possible_prototype();
+      module.exports = Object.setPrototypeOf || ('__proto__' in {} ? function() {
+        var CORRECT_SETTER = false;
+        var test = {};
+        var setter;
+        try {
+          setter = uncurryThisAccessor(Object.prototype, '__proto__', 'set');
+          setter(test, []);
+          CORRECT_SETTER = test instanceof Array;
+        } catch (error) {}
+        return function setPrototypeOf(O, proto) {
+          anObject(O);
+          aPossiblePrototype(proto);
+          if (CORRECT_SETTER) {
+            setter(O, proto);
+          } else {
+            O.__proto__ = proto;
+          }
+          return O;
+        };
+      }() : void 0);
+    });
+    var require_iterator_define = __commonJS(function(exports, module) {
+      'use strict';
+      var $ = require_export();
+      var call = require_function_call();
+      var IS_PURE = require_is_pure();
+      var FunctionName = require_function_name();
+      var isCallable = require_is_callable2();
+      var createIteratorConstructor = require_iterator_create_constructor();
+      var getPrototypeOf = require_object_get_prototype_of();
+      var setPrototypeOf = require_object_set_prototype_of();
+      var setToStringTag = require_set_to_string_tag();
+      var createNonEnumerableProperty = require_create_non_enumerable_property();
+      var defineBuiltIn = require_define_built_in();
+      var wellKnownSymbol = require_well_known_symbol();
+      var Iterators = require_iterators();
+      var IteratorsCore = require_iterators_core();
+      var PROPER_FUNCTION_NAME = FunctionName.PROPER;
+      var CONFIGURABLE_FUNCTION_NAME = FunctionName.CONFIGURABLE;
+      var IteratorPrototype = IteratorsCore.IteratorPrototype;
+      var BUGGY_SAFARI_ITERATORS = IteratorsCore.BUGGY_SAFARI_ITERATORS;
+      var ITERATOR = wellKnownSymbol('iterator');
+      var KEYS = 'keys';
+      var VALUES = 'values';
+      var ENTRIES = 'entries';
+      var returnThis = function returnThis() {
+        return this;
+      };
+      module.exports = function(Iterable, NAME, IteratorConstructor, next, DEFAULT, IS_SET, FORCED) {
+        createIteratorConstructor(IteratorConstructor, NAME, next);
+        var getIterationMethod = function getIterationMethod(KIND) {
+          if (KIND === DEFAULT && defaultIterator) {
+            return defaultIterator;
+          }
+          if (!BUGGY_SAFARI_ITERATORS && KIND && KIND in IterablePrototype) {
+            return IterablePrototype[KIND];
+          }
+          switch (KIND) {
+           case KEYS:
+            return function keys() {
+              return new IteratorConstructor(this, KIND);
+            };
+
+           case VALUES:
+            return function values2() {
+              return new IteratorConstructor(this, KIND);
+            };
+
+           case ENTRIES:
+            return function entries() {
+              return new IteratorConstructor(this, KIND);
+            };
+          }
+          return function() {
+            return new IteratorConstructor(this);
+          };
+        };
+        var TO_STRING_TAG = NAME + ' Iterator';
+        var INCORRECT_VALUES_NAME = false;
+        var IterablePrototype = Iterable.prototype;
+        var nativeIterator = IterablePrototype[ITERATOR] || IterablePrototype['@@iterator'] || DEFAULT && IterablePrototype[DEFAULT];
+        var defaultIterator = !BUGGY_SAFARI_ITERATORS && nativeIterator || getIterationMethod(DEFAULT);
+        var anyNativeIterator = NAME === 'Array' ? IterablePrototype.entries || nativeIterator : nativeIterator;
+        var CurrentIteratorPrototype, methods, KEY;
+        if (anyNativeIterator) {
+          CurrentIteratorPrototype = getPrototypeOf(anyNativeIterator.call(new Iterable()));
+          if (CurrentIteratorPrototype !== Object.prototype && CurrentIteratorPrototype.next) {
+            if (!IS_PURE && getPrototypeOf(CurrentIteratorPrototype) !== IteratorPrototype) {
+              if (setPrototypeOf) {
+                setPrototypeOf(CurrentIteratorPrototype, IteratorPrototype);
+              } else if (!isCallable(CurrentIteratorPrototype[ITERATOR])) {
+                defineBuiltIn(CurrentIteratorPrototype, ITERATOR, returnThis);
+              }
             }
-            sid += 1;
-            indv = iname || 'i' + sid;
-            iterate = unescape(iterate);
-            return '\';var arr' + sid + '=' + iterate + ';if(arr' + sid + '){var ' + vname + ',' + indv + '=-1,l' + sid + '=arr' + sid + '.length-1;while(' + indv + '',
-      sameOrigin: ''
-    };
-    definitions.forEach(function(definition) {
-      var name = definition.name;
-      var value = definition.value;
-      var priority = definition.priority;
-      var group = definition.group;
-      constants[name] = value;
-      constants[name + '_PRIO'] = priority;
-      constants[name + '_GROUP'] = group;
-      constants.results[priority] = value;
-      constants.resultGroups[priority] = group;
-      constants.resultGroupMap[value] = group;
+    var require_es_string_iterator = __commonJS(function() {
+      'use strict';
+      var charAt = require_string_multibyte().charAt;
+      var toString = require_to_string();
+      var InternalStateModule = require_internal_state();
+      var defineIterator = require_iterator_define();
+      var createIterResultObject = require_create_iter_result_object();
+      var STRING_ITERATOR = 'String Iterator';
+      var setInternalState = InternalStateModule.set;
+      var getInternalState = InternalStateModule.getterFor(STRING_ITERATOR);
+      defineIterator(String, 'String', function(iterated) {
+        setInternalState(this, {
+          type: STRING_ITERATOR,
+          string: toString(iterated),
+          index: 0
+        });
+      }, function next() {
+        var state = getInternalState(this);
+        var string = state.string;
+        var index = state.index;
+        var point;
+        if (index >= string.length) {
+          return createIterResultObject(void 0, true);
+        }
+        point = charAt(string, index);
+        state.index += point.length;
+        return createIterResultObject(point, false);
+      });
     });
-    Object.freeze(constants.results);
-    Object.freeze(constants.resultGroups);
-    Object.freeze(constants.resultGroupMap);
-    Object.freeze(constants);
-    var constants_default = constants;
-    function log() {
-      if ((typeof console === 'undefined' ? 'undefined' : _typeof(console)) === 'object' && console.log) {
-        Function.prototype.apply.call(console.log, console, arguments);
-      }
-    }
-    var log_default = log;
-    var whitespaceRegex = /[\t\r\n\f]/g;
-    var AbstractVirtualNode = function() {
-      function AbstractVirtualNode() {
-        _classCallCheck(this, AbstractVirtualNode);
-        this.parent = void 0;
-      }
-      _createClass(AbstractVirtualNode, [ {
-        key: 'props',
-        get: function get() {
-          throw new Error('VirtualNode class must have a "props" object consisting of "nodeType" and "nodeName" properties');
+    var require_iterator_close = __commonJS(function(exports, module) {
+      'use strict';
+      var call = require_function_call();
+      var anObject = require_an_object();
+      var getMethod = require_get_method();
+      module.exports = function(iterator, kind, value) {
+        var innerResult, innerError;
+        anObject(iterator);
+        try {
+          innerResult = getMethod(iterator, 'return');
+          if (!innerResult) {
+            if (kind === 'throw') {
+              throw value;
+            }
+            return value;
+          }
+          innerResult = call(innerResult, iterator);
+        } catch (error) {
+          innerError = true;
+          innerResult = error;
         }
-      }, {
-        key: 'attrNames',
-        get: function get() {
-          throw new Error('VirtualNode class must have an "attrNames" property');
+        if (kind === 'throw') {
+          throw value;
         }
-      }, {
-        key: 'attr',
-        value: function attr() {
-          throw new Error('VirtualNode class must have an "attr" function');
+        if (innerError) {
+          throw innerResult;
         }
-      }, {
-        key: 'hasAttr',
-        value: function hasAttr() {
-          throw new Error('VirtualNode class must have a "hasAttr" function');
+        anObject(innerResult);
+        return value;
+      };
+    });
+    var require_call_with_safe_iteration_closing = __commonJS(function(exports, module) {
+      'use strict';
+      var anObject = require_an_object();
+      var iteratorClose = require_iterator_close();
+      module.exports = function(iterator, fn, value, ENTRIES) {
+        try {
+          return ENTRIES ? fn(anObject(value)[0], value[1]) : fn(value);
+        } catch (error) {
+          iteratorClose(iterator, 'throw', error);
         }
-      }, {
-        key: 'hasClass',
-        value: function hasClass(className) {
-          var classAttr = this.attr('class');
-          if (!classAttr) {
-            return false;
-          }
-          var selector = ' ' + className + ' ';
-          return (' ' + classAttr + ' ').replace(whitespaceRegex, ' ').indexOf(selector) >= 0;
+      };
+    });
+    var require_is_array_iterator_method = __commonJS(function(exports, module) {
+      'use strict';
+      var wellKnownSymbol = require_well_known_symbol();
+      var Iterators = require_iterators();
+      var ITERATOR = wellKnownSymbol('iterator');
+      var ArrayPrototype = Array.prototype;
+      module.exports = function(it) {
+        return it !== void 0 && (Iterators.Array === it || ArrayPrototype[ITERATOR] === it);
+      };
+    });
+    var require_inspect_source = __commonJS(function(exports, module) {
+      'use strict';
+      var uncurryThis = require_function_uncurry_this();
+      var isCallable = require_is_callable2();
+      var store = require_shared_store();
+      var functionToString = uncurryThis(Function.toString);
+      if (!isCallable(store.inspectSource)) {
+        store.inspectSource = function(it) {
+          return functionToString(it);
+        };
+      }
+      module.exports = store.inspectSource;
+    });
+    var require_is_constructor = __commonJS(function(exports, module) {
+      'use strict';
+      var uncurryThis = require_function_uncurry_this();
+      var fails = require_fails();
+      var isCallable = require_is_callable2();
+      var classof = require_classof();
+      var getBuiltIn = require_get_built_in();
+      var inspectSource = require_inspect_source();
+      var noop3 = function noop3() {};
+      var empty = [];
+      var construct = getBuiltIn('Reflect', 'construct');
+      var constructorRegExp = /^\s*(?:class|function)\b/;
+      var exec = uncurryThis(constructorRegExp.exec);
+      var INCORRECT_TO_STRING = !constructorRegExp.test(noop3);
+      var isConstructorModern = function isConstructor(argument) {
+        if (!isCallable(argument)) {
+          return false;
+        }
+        try {
+          construct(noop3, empty, argument);
+          return true;
+        } catch (error) {
+          return false;
+        }
+      };
+      var isConstructorLegacy = function isConstructor(argument) {
+        if (!isCallable(argument)) {
+          return false;
+        }
+        switch (classof(argument)) {
+         case 'AsyncFunction':
+         case 'GeneratorFunction':
+         case 'AsyncGeneratorFunction':
+          return false;
+        }
+        try {
+          return INCORRECT_TO_STRING || !!exec(constructorRegExp, inspectSource(argument));
+        } catch (error) {
+          return true;
+        }
+      };
+      isConstructorLegacy.sham = true;
+      module.exports = !construct || fails(function() {
+        var called;
+        return isConstructorModern(isConstructorModern.call) || !isConstructorModern(Object) || !isConstructorModern(function() {
+          called = true;
+        }) || called;
+      }) ? isConstructorLegacy : isConstructorModern;
+    });
+    var require_create_property = __commonJS(function(exports, module) {
+      'use strict';
+      var toPropertyKey = require_to_property_key();
+      var definePropertyModule = require_object_define_property();
+      var createPropertyDescriptor = require_create_property_descriptor();
+      module.exports = function(object, key, value) {
+        var propertyKey = toPropertyKey(key);
+        if (propertyKey in object) {
+          definePropertyModule.f(object, propertyKey, createPropertyDescriptor(0, value));
+        } else {
+          object[propertyKey] = value;
+        }
+      };
+    });
+    var require_get_iterator_method = __commonJS(function(exports, module) {
+      'use strict';
+      var classof = require_classof();
+      var getMethod = require_get_method();
+      var isNullOrUndefined = require_is_null_or_undefined();
+      var Iterators = require_iterators();
+      var wellKnownSymbol = require_well_known_symbol();
+      var ITERATOR = wellKnownSymbol('iterator');
+      module.exports = function(it) {
+        if (!isNullOrUndefined(it)) {
+          return getMethod(it, ITERATOR) || getMethod(it, '@@iterator') || Iterators[classof(it)];
+        }
+      };
+    });
+    var require_get_iterator = __commonJS(function(exports, module) {
+      'use strict';
+      var call = require_function_call();
+      var aCallable = require_a_callable();
+      var anObject = require_an_object();
+      var tryToString = require_try_to_string();
+      var getIteratorMethod = require_get_iterator_method();
+      var $TypeError = TypeError;
+      module.exports = function(argument, usingIterator) {
+        var iteratorMethod = arguments.length < 2 ? getIteratorMethod(argument) : usingIterator;
+        if (aCallable(iteratorMethod)) {
+          return anObject(call(iteratorMethod, argument));
+        }
+        throw new $TypeError(tryToString(argument) + ' is not iterable');
+      };
+    });
+    var require_array_from = __commonJS(function(exports, module) {
+      'use strict';
+      var bind = require_function_bind_context();
+      var call = require_function_call();
+      var toObject = require_to_object();
+      var callWithSafeIterationClosing = require_call_with_safe_iteration_closing();
+      var isArrayIteratorMethod = require_is_array_iterator_method();
+      var isConstructor = require_is_constructor();
+      var lengthOfArrayLike = require_length_of_array_like();
+      var createProperty = require_create_property();
+      var getIterator = require_get_iterator();
+      var getIteratorMethod = require_get_iterator_method();
+      var $Array = Array;
+      module.exports = function from(arrayLike) {
+        var O = toObject(arrayLike);
+        var IS_CONSTRUCTOR = isConstructor(this);
+        var argumentsLength = arguments.length;
+        var mapfn = argumentsLength > 1 ? arguments[1] : void 0;
+        var mapping = mapfn !== void 0;
+        if (mapping) {
+          mapfn = bind(mapfn, argumentsLength > 2 ? arguments[2] : void 0);
+        }
+        var iteratorMethod = getIteratorMethod(O);
+        var index = 0;
+        var length, result, step, iterator, next, value;
+        if (iteratorMethod && !(this === $Array && isArrayIteratorMethod(iteratorMethod))) {
+          iterator = getIterator(O, iteratorMethod);
+          next = iterator.next;
+          result = IS_CONSTRUCTOR ? new this() : [];
+          for (;!(step = call(next, iterator)).done; index++) {
+            value = mapping ? callWithSafeIterationClosing(iterator, mapfn, [ step.value, index ], true) : step.value;
+            createProperty(result, index, value);
+          }
+        } else {
+          length = lengthOfArrayLike(O);
+          result = IS_CONSTRUCTOR ? new this(length) : $Array(length);
+          for (;length > index; index++) {
+            value = mapping ? mapfn(O[index], index) : O[index];
+            createProperty(result, index, value);
+          }
+        }
+        result.length = index;
+        return result;
+      };
+    });
+    var require_check_correctness_of_iteration = __commonJS(function(exports, module) {
+      'use strict';
+      var wellKnownSymbol = require_well_known_symbol();
+      var ITERATOR = wellKnownSymbol('iterator');
+      var SAFE_CLOSING = false;
+      try {
+        called = 0;
+        iteratorWithReturn = {
+          next: function next() {
+            return {
+              done: !!called++
+            };
+          },
+          return: function _return() {
+            SAFE_CLOSING = true;
+          }
+        };
+        iteratorWithReturn[ITERATOR] = function() {
+          return this;
+        };
+        Array.from(iteratorWithReturn, function() {
+          throw 2;
+        });
+      } catch (error) {}
+      var called;
+      var iteratorWithReturn;
+      module.exports = function(exec, SKIP_CLOSING) {
+        try {
+          if (!SKIP_CLOSING && !SAFE_CLOSING) {
+            return false;
+          }
+        } catch (error) {
+          return false;
+        }
+        var ITERATION_SUPPORT = false;
+        try {
+          var object = {};
+          object[ITERATOR] = function() {
+            return {
+              next: function next() {
+                return {
+                  done: ITERATION_SUPPORT = true
+                };
+              }
+            };
+          };
+          exec(object);
+        } catch (error) {}
+        return ITERATION_SUPPORT;
+      };
+    });
+    var require_es_array_from = __commonJS(function() {
+      'use strict';
+      var $ = require_export();
+      var from = require_array_from();
+      var checkCorrectnessOfIteration = require_check_correctness_of_iteration();
+      var INCORRECT_ITERATION = !checkCorrectnessOfIteration(function(iterable) {
+        Array.from(iterable);
+      });
+      $({
+        target: 'Array',
+        stat: true,
+        forced: INCORRECT_ITERATION
+      }, {
+        from: from
+      });
+    });
+    var require_from2 = __commonJS(function(exports, module) {
+      'use strict';
+      require_es_string_iterator();
+      require_es_array_from();
+      var path = require_path();
+      module.exports = path.Array.from;
+    });
+    var require_from3 = __commonJS(function(exports, module) {
+      'use strict';
+      var parent = require_from2();
+      module.exports = parent;
+    });
+    var require_from4 = __commonJS(function(exports, module) {
+      'use strict';
+      var parent = require_from3();
+      module.exports = parent;
+    });
+    var require_doT = __commonJS(function(exports, module) {
+      (function() {
+        'use strict';
+        var doT3 = {
+          name: 'doT',
+          version: '1.1.1',
+          templateSettings: {
+            evaluate: /\{\{([\s\S]+?(\}?)+)\}\}/g,
+            interpolate: /\{\{=([\s\S]+?)\}\}/g,
+            encode: /\{\{!([\s\S]+?)\}\}/g,
+            use: /\{\{#([\s\S]+?)\}\}/g,
+            useParams: /(^|[^\w$])def(?:\.|\[[\'\"])([\w$\.]+)(?:[\'\"]\])?\s*\:\s*([\w$\.]+|\"[^\"]+\"|\'[^\']+\'|\{[^\}]+\})/g,
+            define: /\{\{##\s*([\w\.$]+)\s*(\:|=)([\s\S]+?)#\}\}/g,
+            defineParams: /^\s*([\w$]+):([\s\S]+)/,
+            conditional: /\{\{\?(\?)?\s*([\s\S]*?)\s*\}\}/g,
+            iterate: /\{\{~\s*(?:\}\}|([\s\S]+?)\s*\:\s*([\w$]+)\s*(?:\:\s*([\w$]+))?\s*\}\})/g,
+            varname: 'it',
+            strip: true,
+            append: true,
+            selfcontained: false,
+            doNotSkipEncoded: false
+          },
+          template: void 0,
+          compile: void 0,
+          log: true
+        };
+        (function() {
+          if ((typeof globalThis === 'undefined' ? 'undefined' : _typeof(globalThis)) === 'object') {
+            return;
+          }
+          try {
+            Object.defineProperty(Object.prototype, '__magic__', {
+              get: function get() {
+                return this;
+              },
+              configurable: true
+            });
+            __magic__.globalThis = __magic__;
+            delete Object.prototype.__magic__;
+          } catch (e) {
+            window.globalThis = function() {
+              if (typeof self !== 'undefined') {
+                return self;
+              }
+              if (typeof window !== 'undefined') {
+                return window;
+              }
+              if (typeof global !== 'undefined') {
+                return global;
+              }
+              if (typeof this !== 'undefined') {
+                return this;
+              }
+              throw new Error('Unable to locate global `this`');
+            }();
+          }
+        })();
+        doT3.encodeHTMLSource = function(doNotSkipEncoded) {
+          var encodeHTMLRules = {
+            '&': '&',
+            '<': '<',
+            '>': '>',
+            '"': '"',
+            '\'': ''',
+            '/': '/'
+          }, matchHTML = doNotSkipEncoded ? /[&<>"'\/]/g : /&(?!#?\w+;)|<|>|"|'|\//g;
+          return function(code) {
+            return code ? code.toString().replace(matchHTML, function(m3) {
+              return encodeHTMLRules[m3] || m3;
+            }) : '';
+          };
+        };
+        if (typeof module !== 'undefined' && module.exports) {
+          module.exports = doT3;
+        } else if (typeof define === 'function' && define.amd) {
+          define(function() {
+            return doT3;
+          });
+        } else {
+          globalThis.doT = doT3;
+        }
+        var startend = {
+          append: {
+            start: '\'+(',
+            end: ')+\'',
+            startencode: '\'+encodeHTML('
+          },
+          split: {
+            start: '\';out+=(',
+            end: ');out+=\'',
+            startencode: '\';out+=encodeHTML('
+          }
+        }, skip = /$^/;
+        function resolveDefs(c4, block, def) {
+          return (typeof block === 'string' ? block : block.toString()).replace(c4.define || skip, function(m3, code, assign, value) {
+            if (code.indexOf('def.') === 0) {
+              code = code.substring(4);
+            }
+            if (!(code in def)) {
+              if (assign === ':') {
+                if (c4.defineParams) {
+                  value.replace(c4.defineParams, function(m4, param, v) {
+                    def[code] = {
+                      arg: param,
+                      text: v
+                    };
+                  });
+                }
+                if (!(code in def)) {
+                  def[code] = value;
+                }
+              } else {
+                new Function('def', 'def[\'' + code + '\']=' + value)(def);
+              }
+            }
+            return '';
+          }).replace(c4.use || skip, function(m3, code) {
+            if (c4.useParams) {
+              code = code.replace(c4.useParams, function(m4, s, d2, param) {
+                if (def[d2] && def[d2].arg && param) {
+                  var rw = (d2 + ':' + param).replace(/'|\\/g, '_');
+                  def.__exp = def.__exp || {};
+                  def.__exp[rw] = def[d2].text.replace(new RegExp('(^|[^\\w$])' + def[d2].arg + '([^\\w$])', 'g'), '$1' + param + '$2');
+                  return s + 'def.__exp[\'' + rw + '\']';
+                }
+              });
+            }
+            var v = new Function('def', 'return ' + code)(def);
+            return v ? resolveDefs(c4, v, def) : v;
+          });
+        }
+        function unescape(code) {
+          return code.replace(/\\('|\\)/g, '$1').replace(/[\r\t\n]/g, ' ');
+        }
+        doT3.template = function(tmpl, c4, def) {
+          c4 = c4 || doT3.templateSettings;
+          var cse = c4.append ? startend.append : startend.split, needhtmlencode, sid = 0, indv, str = c4.use || c4.define ? resolveDefs(c4, tmpl, def || {}) : tmpl;
+          str = ('var out=\'' + (c4.strip ? str.replace(/(^|\r|\n)\t* +| +\t*(\r|\n|$)/g, ' ').replace(/\r|\n|\t|\/\*[\s\S]*?\*\//g, '') : str).replace(/'|\\/g, '\\$&').replace(c4.interpolate || skip, function(m3, code) {
+            return cse.start + unescape(code) + cse.end;
+          }).replace(c4.encode || skip, function(m3, code) {
+            needhtmlencode = true;
+            return cse.startencode + unescape(code) + cse.end;
+          }).replace(c4.conditional || skip, function(m3, elsecase, code) {
+            return elsecase ? code ? '\';}else if(' + unescape(code) + '){out+=\'' : '\';}else{out+=\'' : code ? '\';if(' + unescape(code) + '){out+=\'' : '\';}out+=\'';
+          }).replace(c4.iterate || skip, function(m3, iterate, vname, iname) {
+            if (!iterate) {
+              return '\';} } out+=\'';
+            }
+            sid += 1;
+            indv = iname || 'i' + sid;
+            iterate = unescape(iterate);
+            return '\';var arr' + sid + '=' + iterate + ';if(arr' + sid + '){var ' + vname + ',' + indv + '=-1,l' + sid + '=arr' + sid + '.length-1;while(' + indv + '',
+      sameOrigin: ''
+    };
+    definitions.forEach(function(definition) {
+      var name = definition.name;
+      var value = definition.value;
+      var priority = definition.priority;
+      var group = definition.group;
+      constants[name] = value;
+      constants[name + '_PRIO'] = priority;
+      constants[name + '_GROUP'] = group;
+      constants.results[priority] = value;
+      constants.resultGroups[priority] = group;
+      constants.resultGroupMap[value] = group;
+    });
+    Object.freeze(constants.results);
+    Object.freeze(constants.resultGroups);
+    Object.freeze(constants.resultGroupMap);
+    Object.freeze(constants);
+    var constants_default = constants;
+    function log() {
+      if ((typeof console === 'undefined' ? 'undefined' : _typeof(console)) === 'object' && console.log) {
+        Function.prototype.apply.call(console.log, console, arguments);
+      }
+    }
+    var log_default = log;
+    var whitespaceRegex = /[\t\r\n\f]/g;
+    var AbstractVirtualNode = function() {
+      function AbstractVirtualNode() {
+        _classCallCheck(this, AbstractVirtualNode);
+        this.parent = void 0;
+      }
+      return _createClass(AbstractVirtualNode, [ {
+        key: 'props',
+        get: function get() {
+          throw new Error('VirtualNode class must have a "props" object consisting of "nodeType" and "nodeName" properties');
+        }
+      }, {
+        key: 'attrNames',
+        get: function get() {
+          throw new Error('VirtualNode class must have an "attrNames" property');
+        }
+      }, {
+        key: 'attr',
+        value: function attr() {
+          throw new Error('VirtualNode class must have an "attr" function');
+        }
+      }, {
+        key: 'hasAttr',
+        value: function hasAttr() {
+          throw new Error('VirtualNode class must have a "hasAttr" function');
+        }
+      }, {
+        key: 'hasClass',
+        value: function hasClass(className) {
+          var classAttr = this.attr('class');
+          if (!classAttr) {
+            return false;
+          }
+          var selector = ' ' + className + ' ';
+          return (' ' + classAttr + ' ').replace(whitespaceRegex, ' ').indexOf(selector) >= 0;
         }
       } ]);
-      return AbstractVirtualNode;
     }();
     var abstract_virtual_node_default = AbstractVirtualNode;
     var utils_exports = {};
@@ -8387,12 +9219,19 @@
       if (isAncestor) {
         return false;
       }
-      var rect = vNode.boundingClientRect;
+      var position = vNode.getComputedStylePropertyValue('position');
+      if (position === 'fixed') {
+        return false;
+      }
       var nodes = get_overflow_hidden_ancestors_default(vNode);
       if (!nodes.length) {
         return false;
       }
+      var rect = vNode.boundingClientRect;
       return nodes.some(function(node) {
+        if (position === 'absolute' && !hasPositionedAncestorBetween(vNode, node) && node.getComputedStylePropertyValue('position') === 'static') {
+          return false;
+        }
         var nodeRect = node.boundingClientRect;
         if (nodeRect.width < 2 || nodeRect.height < 2) {
           return true;
@@ -8460,6 +9299,16 @@
       }
       return !vNode.parent.hasAttr('open');
     }
+    function hasPositionedAncestorBetween(child, ancestor) {
+      var node = child.parent;
+      while (node && node !== ancestor) {
+        if ([ 'relative', 'sticky' ].includes(node.getComputedStylePropertyValue('position'))) {
+          return true;
+        }
+        node = node.parent;
+      }
+      return false;
+    }
     var hiddenMethods = [ displayHidden, visibilityHidden, contentVisibiltyHidden, detailsHidden ];
     function _isHiddenForEveryone(vNode) {
       var _ref14 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, skipAncestors = _ref14.skipAncestors, _ref14$isAncestor = _ref14.isAncestor, isAncestor = _ref14$isAncestor === void 0 ? false : _ref14$isAncestor;
@@ -8701,7 +9550,7 @@
       var targetRects = get_target_rects_default(vTarget);
       var neighborRects = get_target_rects_default(vNeighbor);
       if (!targetRects.length || !neighborRects.length) {
-        return 0;
+        return null;
       }
       var targetBoundingBox = targetRects.reduce(_getBoundingRect);
       var targetCenter = _getRectCenter(targetBoundingBox);
@@ -8774,16 +9623,11 @@
             return rects.concat(splitRect(inputRect, overlapRect));
           }, []);
           if (uniqueRects.length > 4e3) {
-            return {
-              v: []
-            };
+            throw new Error('splitRects: Too many rects');
           }
-        }, _ret;
+        };
         for (_iterator3.s(); !(_step3 = _iterator3.n()).done; ) {
-          _ret = _loop3();
-          if (_ret) {
-            return _ret.v;
-          }
+          _loop3();
         }
       } catch (err) {
         _iterator3.e(err);
@@ -9071,7 +9915,7 @@
         this.container = container;
         this.cells = [];
       }
-      _createClass(Grid, [ {
+      return _createClass(Grid, [ {
         key: 'toGridIndex',
         value: function toGridIndex(num) {
           return Math.floor(num / constants_default.gridSize);
@@ -9120,7 +9964,6 @@
           return new window.DOMRect(left, top, right - left, bottom - top);
         }
       } ]);
-      return Grid;
     }();
     function loopNegativeIndexMatrix(matrix, start, end, callback) {
       var _matrix$_negativeInde;
@@ -9562,7 +10405,7 @@
       return _splitRects(nodeRect, obscuringRects);
     }
     function isDescendantNotInTabOrder(vAncestor, vNode) {
-      return vAncestor.actualNode.contains(vNode.actualNode) && !_isInTabOrder(vNode);
+      return _contains(vAncestor, vNode) && !_isInTabOrder(vNode);
     }
     var get_target_size_default = memoize_default(getTargetSize);
     function getTargetSize(vNode, minSize) {
@@ -9932,12 +10775,12 @@
     var aria_attrs_default = ariaAttrs;
     var ariaRoles = {
       alert: {
-        type: 'widget',
+        type: 'structure',
         allowedAttrs: [ 'aria-expanded' ],
         superclassRole: [ 'section' ]
       },
       alertdialog: {
-        type: 'widget',
+        type: 'window',
         allowedAttrs: [ 'aria-expanded', 'aria-modal' ],
         superclassRole: [ 'alert', 'dialog' ],
         accessibleNameRequired: true
@@ -10046,7 +10889,7 @@
         prohibitedAttrs: [ 'aria-label', 'aria-labelledby' ]
       },
       dialog: {
-        type: 'widget',
+        type: 'window',
         allowedAttrs: [ 'aria-expanded', 'aria-modal' ],
         superclassRole: [ 'window' ],
         accessibleNameRequired: true
@@ -10160,7 +11003,7 @@
         nameFromContent: true
       },
       log: {
-        type: 'widget',
+        type: 'structure',
         allowedAttrs: [ 'aria-expanded' ],
         superclassRole: [ 'section' ]
       },
@@ -10170,7 +11013,7 @@
         superclassRole: [ 'landmark' ]
       },
       marquee: {
-        type: 'widget',
+        type: 'structure',
         allowedAttrs: [ 'aria-expanded' ],
         superclassRole: [ 'section' ]
       },
@@ -10380,7 +11223,7 @@
         accessibleNameRequired: true
       },
       status: {
-        type: 'widget',
+        type: 'structure',
         allowedAttrs: [ 'aria-expanded' ],
         superclassRole: [ 'section' ]
       },
@@ -10468,7 +11311,7 @@
         superclassRole: [ 'section' ]
       },
       timer: {
-        type: 'widget',
+        type: 'structure',
         allowedAttrs: [ 'aria-expanded' ],
         superclassRole: [ 'status' ]
       },
@@ -10829,7 +11672,7 @@
       },
       button: {
         contentTypes: [ 'interactive', 'phrasing', 'flow' ],
-        allowedRoles: [ 'checkbox', 'combobox', 'link', 'menuitem', 'menuitemcheckbox', 'menuitemradio', 'option', 'radio', 'switch', 'tab' ],
+        allowedRoles: [ 'checkbox', 'combobox', 'gridcell', 'link', 'menuitem', 'menuitemcheckbox', 'menuitemradio', 'option', 'radio', 'separator', 'slider', 'switch', 'tab', 'treeitem' ],
         namingMethods: [ 'subtreeText' ]
       },
       canvas: {
@@ -11214,4469 +12057,3453 @@
         noAriaAttrs: true
       },
       object: {
-        variant: {
-          usemap: {
-            matches: '[usemap]',
-            contentTypes: [ 'interactive', 'embedded', 'phrasing', 'flow' ]
-          },
-          default: {
-            contentTypes: [ 'embedded', 'phrasing', 'flow' ]
-          }
-        },
-        allowedRoles: [ 'application', 'document', 'img' ],
-        chromiumRole: 'PluginObject'
-      },
-      ol: {
-        contentTypes: [ 'flow' ],
-        allowedRoles: [ 'directory', 'group', 'listbox', 'menu', 'menubar', 'none', 'presentation', 'radiogroup', 'tablist', 'toolbar', 'tree' ]
-      },
-      optgroup: {
-        allowedRoles: false
-      },
-      option: {
-        allowedRoles: false,
-        implicitAttrs: {
-          'aria-selected': 'false'
-        }
-      },
-      output: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true,
-        namingMethods: [ 'subtreeText' ]
-      },
-      p: {
-        contentTypes: [ 'flow' ],
-        allowedRoles: true,
-        shadowRoot: true
-      },
-      param: {
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      picture: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      pre: {
-        contentTypes: [ 'flow' ],
-        allowedRoles: true
-      },
-      progress: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: false,
-        implicitAttrs: {
-          'aria-valuemax': '100',
-          'aria-valuemin': '0',
-          'aria-valuenow': '0'
-        }
-      },
-      q: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      rp: {
-        allowedRoles: true
-      },
-      rt: {
-        allowedRoles: true
-      },
-      ruby: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      s: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      samp: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      script: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      search: {
-        contentTypes: [ 'flow' ],
-        allowedRoles: [ 'form', 'group', 'none', 'presentation', 'region', 'search' ]
-      },
-      section: {
-        contentTypes: [ 'sectioning', 'flow' ],
-        allowedRoles: [ 'alert', 'alertdialog', 'application', 'banner', 'complementary', 'contentinfo', 'dialog', 'document', 'feed', 'group', 'log', 'main', 'marquee', 'navigation', 'none', 'note', 'presentation', 'search', 'status', 'tabpanel', 'doc-abstract', 'doc-acknowledgments', 'doc-afterword', 'doc-appendix', 'doc-bibliography', 'doc-chapter', 'doc-colophon', 'doc-conclusion', 'doc-credit', 'doc-credits', 'doc-dedication', 'doc-endnotes', 'doc-epigraph', 'doc-epilogue', 'doc-errata', 'doc-example', 'doc-foreword', 'doc-glossary', 'doc-index', 'doc-introduction', 'doc-notice', 'doc-pagelist', 'doc-part', 'doc-preface', 'doc-prologue', 'doc-pullquote', 'doc-qna', 'doc-toc' ],
-        shadowRoot: true
-      },
-      select: {
-        variant: {
-          combobox: {
-            matches: {
-              attributes: {
-                multiple: null,
-                size: [ null, '1' ]
-              }
-            },
-            allowedRoles: [ 'menu' ]
-          },
-          default: {
-            allowedRoles: false
-          }
-        },
-        contentTypes: [ 'interactive', 'phrasing', 'flow' ],
-        implicitAttrs: {
-          'aria-valuenow': ''
-        },
-        namingMethods: [ 'labelText' ]
-      },
-      slot: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      small: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      source: {
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      span: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true,
-        shadowRoot: true
-      },
-      strong: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      style: {
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      svg: {
-        contentTypes: [ 'embedded', 'phrasing', 'flow' ],
-        allowedRoles: true,
-        chromiumRole: 'SVGRoot',
-        namingMethods: [ 'svgTitleText' ]
-      },
-      sub: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      summary: {
-        allowedRoles: false,
-        namingMethods: [ 'subtreeText' ]
-      },
-      sup: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      table: {
-        contentTypes: [ 'flow' ],
-        allowedRoles: true,
-        namingMethods: [ 'tableCaptionText', 'tableSummaryText' ]
-      },
-      tbody: {
-        allowedRoles: true
-      },
-      template: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      textarea: {
-        contentTypes: [ 'interactive', 'phrasing', 'flow' ],
-        allowedRoles: false,
-        implicitAttrs: {
-          'aria-valuenow': '',
-          'aria-multiline': 'true'
-        },
-        namingMethods: [ 'labelText', 'placeholderText' ]
-      },
-      tfoot: {
-        allowedRoles: true
-      },
-      thead: {
-        allowedRoles: true
-      },
-      time: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      title: {
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      td: {
-        allowedRoles: true
-      },
-      th: {
-        allowedRoles: true
-      },
-      tr: {
-        allowedRoles: true
-      },
-      track: {
-        allowedRoles: false,
-        noAriaAttrs: true
-      },
-      u: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      ul: {
-        contentTypes: [ 'flow' ],
-        allowedRoles: [ 'directory', 'group', 'listbox', 'menu', 'menubar', 'none', 'presentation', 'radiogroup', 'tablist', 'toolbar', 'tree' ]
-      },
-      var: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: true
-      },
-      video: {
-        variant: {
-          controls: {
-            matches: '[controls]',
+        variant: {
+          usemap: {
+            matches: '[usemap]',
             contentTypes: [ 'interactive', 'embedded', 'phrasing', 'flow' ]
           },
           default: {
             contentTypes: [ 'embedded', 'phrasing', 'flow' ]
           }
         },
-        allowedRoles: [ 'application' ],
-        chromiumRole: 'video'
+        allowedRoles: [ 'application', 'document', 'img' ],
+        chromiumRole: 'PluginObject'
       },
-      wbr: {
-        contentTypes: [ 'phrasing', 'flow' ],
-        allowedRoles: [ 'presentation', 'none' ]
-      }
-    };
-    var html_elms_default = htmlElms;
-    var cssColors = {
-      aliceblue: [ 240, 248, 255 ],
-      antiquewhite: [ 250, 235, 215 ],
-      aqua: [ 0, 255, 255 ],
-      aquamarine: [ 127, 255, 212 ],
-      azure: [ 240, 255, 255 ],
-      beige: [ 245, 245, 220 ],
-      bisque: [ 255, 228, 196 ],
-      black: [ 0, 0, 0 ],
-      blanchedalmond: [ 255, 235, 205 ],
-      blue: [ 0, 0, 255 ],
-      blueviolet: [ 138, 43, 226 ],
-      brown: [ 165, 42, 42 ],
-      burlywood: [ 222, 184, 135 ],
-      cadetblue: [ 95, 158, 160 ],
-      chartreuse: [ 127, 255, 0 ],
-      chocolate: [ 210, 105, 30 ],
-      coral: [ 255, 127, 80 ],
-      cornflowerblue: [ 100, 149, 237 ],
-      cornsilk: [ 255, 248, 220 ],
-      crimson: [ 220, 20, 60 ],
-      cyan: [ 0, 255, 255 ],
-      darkblue: [ 0, 0, 139 ],
-      darkcyan: [ 0, 139, 139 ],
-      darkgoldenrod: [ 184, 134, 11 ],
-      darkgray: [ 169, 169, 169 ],
-      darkgreen: [ 0, 100, 0 ],
-      darkgrey: [ 169, 169, 169 ],
-      darkkhaki: [ 189, 183, 107 ],
-      darkmagenta: [ 139, 0, 139 ],
-      darkolivegreen: [ 85, 107, 47 ],
-      darkorange: [ 255, 140, 0 ],
-      darkorchid: [ 153, 50, 204 ],
-      darkred: [ 139, 0, 0 ],
-      darksalmon: [ 233, 150, 122 ],
-      darkseagreen: [ 143, 188, 143 ],
-      darkslateblue: [ 72, 61, 139 ],
-      darkslategray: [ 47, 79, 79 ],
-      darkslategrey: [ 47, 79, 79 ],
-      darkturquoise: [ 0, 206, 209 ],
-      darkviolet: [ 148, 0, 211 ],
-      deeppink: [ 255, 20, 147 ],
-      deepskyblue: [ 0, 191, 255 ],
-      dimgray: [ 105, 105, 105 ],
-      dimgrey: [ 105, 105, 105 ],
-      dodgerblue: [ 30, 144, 255 ],
-      firebrick: [ 178, 34, 34 ],
-      floralwhite: [ 255, 250, 240 ],
-      forestgreen: [ 34, 139, 34 ],
-      fuchsia: [ 255, 0, 255 ],
-      gainsboro: [ 220, 220, 220 ],
-      ghostwhite: [ 248, 248, 255 ],
-      gold: [ 255, 215, 0 ],
-      goldenrod: [ 218, 165, 32 ],
-      gray: [ 128, 128, 128 ],
-      green: [ 0, 128, 0 ],
-      greenyellow: [ 173, 255, 47 ],
-      grey: [ 128, 128, 128 ],
-      honeydew: [ 240, 255, 240 ],
-      hotpink: [ 255, 105, 180 ],
-      indianred: [ 205, 92, 92 ],
-      indigo: [ 75, 0, 130 ],
-      ivory: [ 255, 255, 240 ],
-      khaki: [ 240, 230, 140 ],
-      lavender: [ 230, 230, 250 ],
-      lavenderblush: [ 255, 240, 245 ],
-      lawngreen: [ 124, 252, 0 ],
-      lemonchiffon: [ 255, 250, 205 ],
-      lightblue: [ 173, 216, 230 ],
-      lightcoral: [ 240, 128, 128 ],
-      lightcyan: [ 224, 255, 255 ],
-      lightgoldenrodyellow: [ 250, 250, 210 ],
-      lightgray: [ 211, 211, 211 ],
-      lightgreen: [ 144, 238, 144 ],
-      lightgrey: [ 211, 211, 211 ],
-      lightpink: [ 255, 182, 193 ],
-      lightsalmon: [ 255, 160, 122 ],
-      lightseagreen: [ 32, 178, 170 ],
-      lightskyblue: [ 135, 206, 250 ],
-      lightslategray: [ 119, 136, 153 ],
-      lightslategrey: [ 119, 136, 153 ],
-      lightsteelblue: [ 176, 196, 222 ],
-      lightyellow: [ 255, 255, 224 ],
-      lime: [ 0, 255, 0 ],
-      limegreen: [ 50, 205, 50 ],
-      linen: [ 250, 240, 230 ],
-      magenta: [ 255, 0, 255 ],
-      maroon: [ 128, 0, 0 ],
-      mediumaquamarine: [ 102, 205, 170 ],
-      mediumblue: [ 0, 0, 205 ],
-      mediumorchid: [ 186, 85, 211 ],
-      mediumpurple: [ 147, 112, 219 ],
-      mediumseagreen: [ 60, 179, 113 ],
-      mediumslateblue: [ 123, 104, 238 ],
-      mediumspringgreen: [ 0, 250, 154 ],
-      mediumturquoise: [ 72, 209, 204 ],
-      mediumvioletred: [ 199, 21, 133 ],
-      midnightblue: [ 25, 25, 112 ],
-      mintcream: [ 245, 255, 250 ],
-      mistyrose: [ 255, 228, 225 ],
-      moccasin: [ 255, 228, 181 ],
-      navajowhite: [ 255, 222, 173 ],
-      navy: [ 0, 0, 128 ],
-      oldlace: [ 253, 245, 230 ],
-      olive: [ 128, 128, 0 ],
-      olivedrab: [ 107, 142, 35 ],
-      orange: [ 255, 165, 0 ],
-      orangered: [ 255, 69, 0 ],
-      orchid: [ 218, 112, 214 ],
-      palegoldenrod: [ 238, 232, 170 ],
-      palegreen: [ 152, 251, 152 ],
-      paleturquoise: [ 175, 238, 238 ],
-      palevioletred: [ 219, 112, 147 ],
-      papayawhip: [ 255, 239, 213 ],
-      peachpuff: [ 255, 218, 185 ],
-      peru: [ 205, 133, 63 ],
-      pink: [ 255, 192, 203 ],
-      plum: [ 221, 160, 221 ],
-      powderblue: [ 176, 224, 230 ],
-      purple: [ 128, 0, 128 ],
-      rebeccapurple: [ 102, 51, 153 ],
-      red: [ 255, 0, 0 ],
-      rosybrown: [ 188, 143, 143 ],
-      royalblue: [ 65, 105, 225 ],
-      saddlebrown: [ 139, 69, 19 ],
-      salmon: [ 250, 128, 114 ],
-      sandybrown: [ 244, 164, 96 ],
-      seagreen: [ 46, 139, 87 ],
-      seashell: [ 255, 245, 238 ],
-      sienna: [ 160, 82, 45 ],
-      silver: [ 192, 192, 192 ],
-      skyblue: [ 135, 206, 235 ],
-      slateblue: [ 106, 90, 205 ],
-      slategray: [ 112, 128, 144 ],
-      slategrey: [ 112, 128, 144 ],
-      snow: [ 255, 250, 250 ],
-      springgreen: [ 0, 255, 127 ],
-      steelblue: [ 70, 130, 180 ],
-      tan: [ 210, 180, 140 ],
-      teal: [ 0, 128, 128 ],
-      thistle: [ 216, 191, 216 ],
-      tomato: [ 255, 99, 71 ],
-      turquoise: [ 64, 224, 208 ],
-      violet: [ 238, 130, 238 ],
-      wheat: [ 245, 222, 179 ],
-      white: [ 255, 255, 255 ],
-      whitesmoke: [ 245, 245, 245 ],
-      yellow: [ 255, 255, 0 ],
-      yellowgreen: [ 154, 205, 50 ]
-    };
-    var css_colors_default = cssColors;
-    var originals = {
-      ariaAttrs: aria_attrs_default,
-      ariaRoles: _extends({}, aria_roles_default, dpub_roles_default, graphics_roles_default),
-      htmlElms: html_elms_default,
-      cssColors: css_colors_default
-    };
-    var standards = _extends({}, originals);
-    function configureStandards(config) {
-      Object.keys(standards).forEach(function(propName) {
-        if (config[propName]) {
-          standards[propName] = deep_merge_default(standards[propName], config[propName]);
-        }
-      });
-    }
-    function resetStandards() {
-      Object.keys(standards).forEach(function(propName) {
-        standards[propName] = originals[propName];
-      });
-    }
-    var standards_default = standards;
-    function isUnsupportedRole(role) {
-      var roleDefinition = standards_default.ariaRoles[role];
-      return roleDefinition ? !!roleDefinition.unsupported : false;
-    }
-    var is_unsupported_role_default = isUnsupportedRole;
-    function isValidRole(role) {
-      var _ref26 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, allowAbstract = _ref26.allowAbstract, _ref26$flagUnsupporte = _ref26.flagUnsupported, flagUnsupported = _ref26$flagUnsupporte === void 0 ? false : _ref26$flagUnsupporte;
-      var roleDefinition = standards_default.ariaRoles[role];
-      var isRoleUnsupported = is_unsupported_role_default(role);
-      if (!roleDefinition || flagUnsupported && isRoleUnsupported) {
-        return false;
-      }
-      return allowAbstract ? true : roleDefinition.type !== 'abstract';
-    }
-    var is_valid_role_default = isValidRole;
-    function getExplicitRole(vNode) {
-      var _ref27 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, fallback = _ref27.fallback, abstracts = _ref27.abstracts, dpub = _ref27.dpub;
-      vNode = vNode instanceof abstract_virtual_node_default ? vNode : get_node_from_tree_default(vNode);
-      if (vNode.props.nodeType !== 1) {
-        return null;
-      }
-      var roleAttr = (vNode.attr('role') || '').trim().toLowerCase();
-      var roleList = fallback ? token_list_default(roleAttr) : [ roleAttr ];
-      var firstValidRole = roleList.find(function(role) {
-        if (!dpub && role.substr(0, 4) === 'doc-') {
-          return false;
-        }
-        return is_valid_role_default(role, {
-          allowAbstract: abstracts
-        });
-      });
-      return firstValidRole || null;
-    }
-    var get_explicit_role_default = getExplicitRole;
-    function getElementsByContentType(type2) {
-      return Object.keys(standards_default.htmlElms).filter(function(nodeName2) {
-        var elm = standards_default.htmlElms[nodeName2];
-        if (elm.contentTypes) {
-          return elm.contentTypes.includes(type2);
-        }
-        if (!elm.variant) {
-          return false;
+      ol: {
+        contentTypes: [ 'flow' ],
+        allowedRoles: [ 'directory', 'group', 'listbox', 'menu', 'menubar', 'none', 'presentation', 'radiogroup', 'tablist', 'toolbar', 'tree' ]
+      },
+      optgroup: {
+        allowedRoles: false
+      },
+      option: {
+        allowedRoles: false,
+        implicitAttrs: {
+          'aria-selected': 'false'
         }
-        if (elm.variant['default'] && elm.variant['default'].contentTypes) {
-          return elm.variant['default'].contentTypes.includes(type2);
+      },
+      output: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true,
+        namingMethods: [ 'subtreeText' ]
+      },
+      p: {
+        contentTypes: [ 'flow' ],
+        allowedRoles: true,
+        shadowRoot: true
+      },
+      param: {
+        allowedRoles: false,
+        noAriaAttrs: true
+      },
+      picture: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: false,
+        noAriaAttrs: true
+      },
+      pre: {
+        contentTypes: [ 'flow' ],
+        allowedRoles: true
+      },
+      progress: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: false,
+        implicitAttrs: {
+          'aria-valuemax': '100',
+          'aria-valuemin': '0',
+          'aria-valuenow': '0'
         }
-        return false;
-      });
-    }
-    var get_elements_by_content_type_default = getElementsByContentType;
-    function getGlobalAriaAttrs() {
-      return cache_default.get('globalAriaAttrs', function() {
-        return Object.keys(standards_default.ariaAttrs).filter(function(attrName) {
-          return standards_default.ariaAttrs[attrName].global;
-        });
-      });
-    }
-    var get_global_aria_attrs_default = getGlobalAriaAttrs;
-    function toGrid(node) {
-      var table = [];
-      var rows = node.rows;
-      for (var i = 0, rowLength = rows.length; i < rowLength; i++) {
-        var cells = rows[i].cells;
-        table[i] = table[i] || [];
-        var columnIndex = 0;
-        for (var j = 0, cellLength = cells.length; j < cellLength; j++) {
-          for (var colSpan = 0; colSpan < cells[j].colSpan; colSpan++) {
-            var rowspanAttr = cells[j].getAttribute('rowspan');
-            var rowspanValue = parseInt(rowspanAttr) === 0 || cells[j].rowspan === 0 ? rows.length : cells[j].rowSpan;
-            for (var rowSpan = 0; rowSpan < rowspanValue; rowSpan++) {
-              table[i + rowSpan] = table[i + rowSpan] || [];
-              while (table[i + rowSpan][columnIndex]) {
-                columnIndex++;
+      },
+      q: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      rp: {
+        allowedRoles: true
+      },
+      rt: {
+        allowedRoles: true
+      },
+      ruby: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      s: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      samp: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      script: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: false,
+        noAriaAttrs: true
+      },
+      search: {
+        contentTypes: [ 'flow' ],
+        allowedRoles: [ 'form', 'group', 'none', 'presentation', 'region', 'search' ]
+      },
+      section: {
+        contentTypes: [ 'sectioning', 'flow' ],
+        allowedRoles: [ 'alert', 'alertdialog', 'application', 'banner', 'complementary', 'contentinfo', 'dialog', 'document', 'feed', 'group', 'log', 'main', 'marquee', 'navigation', 'none', 'note', 'presentation', 'search', 'status', 'tabpanel', 'doc-abstract', 'doc-acknowledgments', 'doc-afterword', 'doc-appendix', 'doc-bibliography', 'doc-chapter', 'doc-colophon', 'doc-conclusion', 'doc-credit', 'doc-credits', 'doc-dedication', 'doc-endnotes', 'doc-epigraph', 'doc-epilogue', 'doc-errata', 'doc-example', 'doc-foreword', 'doc-glossary', 'doc-index', 'doc-introduction', 'doc-notice', 'doc-pagelist', 'doc-part', 'doc-preface', 'doc-prologue', 'doc-pullquote', 'doc-qna', 'doc-toc' ],
+        shadowRoot: true
+      },
+      select: {
+        variant: {
+          combobox: {
+            matches: {
+              attributes: {
+                multiple: null,
+                size: [ null, '1' ]
               }
-              table[i + rowSpan][columnIndex] = cells[j];
-            }
-            columnIndex++;
-          }
-        }
-      }
-      return table;
-    }
-    var to_grid_default = memoize_default(toGrid);
-    function getCellPosition(cell, tableGrid) {
-      var rowIndex, index;
-      if (!tableGrid) {
-        tableGrid = to_grid_default(find_up_default(cell, 'table'));
-      }
-      for (rowIndex = 0; rowIndex < tableGrid.length; rowIndex++) {
-        if (tableGrid[rowIndex]) {
-          index = tableGrid[rowIndex].indexOf(cell);
-          if (index !== -1) {
-            return {
-              x: index,
-              y: rowIndex
-            };
+            },
+            allowedRoles: [ 'menu' ]
+          },
+          default: {
+            allowedRoles: false
           }
-        }
-      }
-    }
-    var get_cell_position_default = memoize_default(getCellPosition);
-    function _getScope(el) {
-      var _nodeLookup9 = _nodeLookup(el), vNode = _nodeLookup9.vNode, cell = _nodeLookup9.domNode;
-      var scope = vNode.attr('scope');
-      var role = vNode.attr('role');
-      if (![ 'td', 'th' ].includes(vNode.props.nodeName)) {
-        throw new TypeError('Expected TD or TH element');
-      }
-      if (role === 'columnheader') {
-        return 'col';
-      } else if (role === 'rowheader') {
-        return 'row';
-      } else if (scope === 'col' || scope === 'row') {
-        return scope;
-      } else if (vNode.props.nodeName !== 'th') {
-        return false;
-      } else if (!vNode.actualNode) {
-        return 'auto';
-      }
-      var tableGrid = to_grid_default(find_up_default(cell, 'table'));
-      var pos = get_cell_position_default(cell, tableGrid);
-      var headerRow = tableGrid[pos.y].every(function(node) {
-        return node.nodeName.toUpperCase() === 'TH';
-      });
-      if (headerRow) {
-        return 'col';
-      }
-      var headerCol = tableGrid.map(function(col) {
-        return col[pos.x];
-      }).every(function(node) {
-        return node && node.nodeName.toUpperCase() === 'TH';
-      });
-      if (headerCol) {
-        return 'row';
-      }
-      return 'auto';
-    }
-    function isColumnHeader(element) {
-      return [ 'col', 'auto' ].indexOf(_getScope(element)) !== -1;
-    }
-    var is_column_header_default = isColumnHeader;
-    function isRowHeader(cell) {
-      return [ 'row', 'auto' ].includes(_getScope(cell));
-    }
-    var is_row_header_default = isRowHeader;
-    function sanitize(str) {
-      if (!str) {
-        return '';
-      }
-      return str.replace(/\r\n/g, '\n').replace(/\u00A0/g, ' ').replace(/[\s]{2,}/g, ' ').trim();
-    }
-    var sanitize_default = sanitize;
-    var getSectioningElementSelector = function getSectioningElementSelector() {
-      return cache_default.get('sectioningElementSelector', function() {
-        return get_elements_by_content_type_default('sectioning').map(function(nodeName2) {
-          return ''.concat(nodeName2, ':not([role])');
-        }).join(', ') + ' , main:not([role]), [role=article], [role=complementary], [role=main], [role=navigation], [role=region]';
-      });
-    };
-    function hasAccessibleName(vNode) {
-      var ariaLabelledby = sanitize_default(arialabelledby_text_default(vNode));
-      var ariaLabel = sanitize_default(_arialabelText(vNode));
-      return !!(ariaLabelledby || ariaLabel);
-    }
-    var implicitHtmlRoles = {
-      a: function a(vNode) {
-        return vNode.hasAttr('href') ? 'link' : null;
+        },
+        contentTypes: [ 'interactive', 'phrasing', 'flow' ],
+        implicitAttrs: {
+          'aria-valuenow': ''
+        },
+        namingMethods: [ 'labelText' ]
       },
-      area: function area(vNode) {
-        return vNode.hasAttr('href') ? 'link' : null;
+      slot: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: false,
+        noAriaAttrs: true
       },
-      article: 'article',
-      aside: 'complementary',
-      body: 'document',
-      button: 'button',
-      datalist: 'listbox',
-      dd: 'definition',
-      dfn: 'term',
-      details: 'group',
-      dialog: 'dialog',
-      dt: 'term',
-      fieldset: 'group',
-      figure: 'figure',
-      footer: function footer(vNode) {
-        var sectioningElement = closest_default(vNode, getSectioningElementSelector());
-        return !sectioningElement ? 'contentinfo' : null;
+      small: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
       },
-      form: function form(vNode) {
-        return hasAccessibleName(vNode) ? 'form' : null;
+      source: {
+        allowedRoles: false,
+        noAriaAttrs: true
+      },
+      span: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true,
+        shadowRoot: true
+      },
+      strong: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      style: {
+        allowedRoles: false,
+        noAriaAttrs: true
+      },
+      svg: {
+        contentTypes: [ 'embedded', 'phrasing', 'flow' ],
+        allowedRoles: true,
+        chromiumRole: 'SVGRoot',
+        namingMethods: [ 'svgTitleText' ]
+      },
+      sub: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      summary: {
+        allowedRoles: false,
+        namingMethods: [ 'subtreeText' ]
+      },
+      sup: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      table: {
+        contentTypes: [ 'flow' ],
+        allowedRoles: true,
+        namingMethods: [ 'tableCaptionText', 'tableSummaryText' ]
+      },
+      tbody: {
+        allowedRoles: true
       },
-      h1: 'heading',
-      h2: 'heading',
-      h3: 'heading',
-      h4: 'heading',
-      h5: 'heading',
-      h6: 'heading',
-      header: function header(vNode) {
-        var sectioningElement = closest_default(vNode, getSectioningElementSelector());
-        return !sectioningElement ? 'banner' : null;
+      template: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: false,
+        noAriaAttrs: true
       },
-      hr: 'separator',
-      img: function img(vNode) {
-        var emptyAlt = vNode.hasAttr('alt') && !vNode.attr('alt');
-        var hasGlobalAria = get_global_aria_attrs_default().find(function(attr) {
-          return vNode.hasAttr(attr);
-        });
-        return emptyAlt && !hasGlobalAria && !_isFocusable(vNode) ? 'presentation' : 'img';
+      textarea: {
+        contentTypes: [ 'interactive', 'phrasing', 'flow' ],
+        allowedRoles: false,
+        implicitAttrs: {
+          'aria-valuenow': '',
+          'aria-multiline': 'true'
+        },
+        namingMethods: [ 'labelText', 'placeholderText' ]
       },
-      input: function input(vNode) {
-        var suggestionsSourceElement;
-        if (vNode.hasAttr('list')) {
-          var listElement = idrefs_default(vNode.actualNode, 'list').filter(function(node) {
-            return !!node;
-          })[0];
-          suggestionsSourceElement = listElement && listElement.nodeName.toLowerCase() === 'datalist';
-        }
-        switch (vNode.props.type) {
-         case 'checkbox':
-          return 'checkbox';
-
-         case 'number':
-          return 'spinbutton';
-
-         case 'radio':
-          return 'radio';
-
-         case 'range':
-          return 'slider';
-
-         case 'search':
-          return !suggestionsSourceElement ? 'searchbox' : 'combobox';
-
-         case 'button':
-         case 'image':
-         case 'reset':
-         case 'submit':
-          return 'button';
-
-         case 'text':
-         case 'tel':
-         case 'url':
-         case 'email':
-         case '':
-          return !suggestionsSourceElement ? 'textbox' : 'combobox';
-
-         default:
-          return 'textbox';
-        }
+      tfoot: {
+        allowedRoles: true
       },
-      li: 'listitem',
-      main: 'main',
-      math: 'math',
-      menu: 'list',
-      nav: 'navigation',
-      ol: 'list',
-      optgroup: 'group',
-      option: 'option',
-      output: 'status',
-      progress: 'progressbar',
-      search: 'search',
-      section: function section(vNode) {
-        return hasAccessibleName(vNode) ? 'region' : null;
+      thead: {
+        allowedRoles: true
       },
-      select: function select(vNode) {
-        return vNode.hasAttr('multiple') || parseInt(vNode.attr('size')) > 1 ? 'listbox' : 'combobox';
+      time: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
       },
-      summary: 'button',
-      table: 'table',
-      tbody: 'rowgroup',
-      td: function td(vNode) {
-        var table = closest_default(vNode, 'table');
-        var role = get_explicit_role_default(table);
-        return [ 'grid', 'treegrid' ].includes(role) ? 'gridcell' : 'cell';
+      title: {
+        allowedRoles: false,
+        noAriaAttrs: true
       },
-      textarea: 'textbox',
-      tfoot: 'rowgroup',
-      th: function th(vNode) {
-        if (is_column_header_default(vNode)) {
-          return 'columnheader';
-        }
-        if (is_row_header_default(vNode)) {
-          return 'rowheader';
-        }
+      td: {
+        allowedRoles: true
       },
-      thead: 'rowgroup',
-      tr: 'row',
-      ul: 'list'
-    };
-    var implicit_html_roles_default = implicitHtmlRoles;
-    function fromPrimative(someString, matcher) {
-      var matcherType = _typeof(matcher);
-      if (Array.isArray(matcher) && typeof someString !== 'undefined') {
-        return matcher.includes(someString);
-      }
-      if (matcherType === 'function') {
-        return !!matcher(someString);
-      }
-      if (someString !== null && someString !== void 0) {
-        if (matcher instanceof RegExp) {
-          return matcher.test(someString);
-        }
-        if (/^\/.*\/$/.test(matcher)) {
-          var pattern = matcher.substring(1, matcher.length - 1);
-          return new RegExp(pattern).test(someString);
-        }
-      }
-      return matcher === someString;
-    }
-    var from_primative_default = fromPrimative;
-    function hasAccessibleName2(vNode, matcher) {
-      return from_primative_default(!!_accessibleTextVirtual(vNode), matcher);
-    }
-    var has_accessible_name_default = hasAccessibleName2;
-    function fromFunction(getValue, matcher) {
-      var matcherType = _typeof(matcher);
-      if (matcherType !== 'object' || Array.isArray(matcher) || matcher instanceof RegExp) {
-        throw new Error('Expect matcher to be an object');
-      }
-      return Object.keys(matcher).every(function(propName) {
-        return from_primative_default(getValue(propName), matcher[propName]);
-      });
-    }
-    var from_function_default = fromFunction;
-    function attributes(vNode, matcher) {
-      vNode = _nodeLookup(vNode).vNode;
-      return from_function_default(function(attrName) {
-        return vNode.attr(attrName);
-      }, matcher);
-    }
-    var attributes_default = attributes;
-    function condition(arg, matcher) {
-      return !!matcher(arg);
-    }
-    function explicitRole(vNode, matcher) {
-      return from_primative_default(get_explicit_role_default(vNode), matcher);
-    }
-    var explicit_role_default = explicitRole;
-    function implicitRole(vNode, matcher) {
-      return from_primative_default(implicit_role_default(vNode), matcher);
-    }
-    var implicit_role_default2 = implicitRole;
-    function nodeName(vNode, matcher) {
-      vNode = _nodeLookup(vNode).vNode;
-      return from_primative_default(vNode.props.nodeName, matcher);
-    }
-    var node_name_default = nodeName;
-    function properties(vNode, matcher) {
-      vNode = _nodeLookup(vNode).vNode;
-      return from_function_default(function(propName) {
-        return vNode.props[propName];
-      }, matcher);
-    }
-    var properties_default = properties;
-    function semanticRole(vNode, matcher) {
-      return from_primative_default(get_role_default(vNode), matcher);
-    }
-    var semantic_role_default = semanticRole;
-    var matchers = {
-      hasAccessibleName: has_accessible_name_default,
-      attributes: attributes_default,
-      condition: condition,
-      explicitRole: explicit_role_default,
-      implicitRole: implicit_role_default2,
-      nodeName: node_name_default,
-      properties: properties_default,
-      semanticRole: semantic_role_default
-    };
-    function fromDefinition(vNode, definition) {
-      vNode = _nodeLookup(vNode).vNode;
-      if (Array.isArray(definition)) {
-        return definition.some(function(definitionItem) {
-          return fromDefinition(vNode, definitionItem);
-        });
-      }
-      if (typeof definition === 'string') {
-        return _matches(vNode, definition);
-      }
-      return Object.keys(definition).every(function(matcherName) {
-        if (!matchers[matcherName]) {
-          throw new Error('Unknown matcher type "'.concat(matcherName, '"'));
-        }
-        var matchMethod = matchers[matcherName];
-        var matcher = definition[matcherName];
-        return matchMethod(vNode, matcher);
-      });
-    }
-    var from_definition_default = fromDefinition;
-    function matches2(vNode, definition) {
-      return from_definition_default(vNode, definition);
-    }
-    var matches_default = matches2;
-    matches_default.hasAccessibleName = has_accessible_name_default;
-    matches_default.attributes = attributes_default;
-    matches_default.condition = condition;
-    matches_default.explicitRole = explicit_role_default;
-    matches_default.fromDefinition = from_definition_default;
-    matches_default.fromFunction = from_function_default;
-    matches_default.fromPrimative = from_primative_default;
-    matches_default.implicitRole = implicit_role_default2;
-    matches_default.nodeName = node_name_default;
-    matches_default.properties = properties_default;
-    matches_default.semanticRole = semantic_role_default;
-    var matches_default2 = matches_default;
-    function getElementSpec(vNode) {
-      var _ref28 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref28$noMatchAccessi = _ref28.noMatchAccessibleName, noMatchAccessibleName = _ref28$noMatchAccessi === void 0 ? false : _ref28$noMatchAccessi;
-      var standard = standards_default.htmlElms[vNode.props.nodeName];
-      if (!standard) {
-        return {};
-      }
-      if (!standard.variant) {
-        return standard;
-      }
-      var variant = standard.variant, spec = _objectWithoutProperties(standard, _excluded4);
-      for (var variantName in variant) {
-        if (!variant.hasOwnProperty(variantName) || variantName === 'default') {
-          continue;
-        }
-        var _variant$variantName = variant[variantName], matches4 = _variant$variantName.matches, props = _objectWithoutProperties(_variant$variantName, _excluded5);
-        var matchProperties = Array.isArray(matches4) ? matches4 : [ matches4 ];
-        for (var _i9 = 0; _i9 < matchProperties.length && noMatchAccessibleName; _i9++) {
-          if (matchProperties[_i9].hasOwnProperty('hasAccessibleName')) {
-            return standard;
-          }
-        }
-        if (matches_default2(vNode, matches4)) {
-          for (var propName in props) {
-            if (props.hasOwnProperty(propName)) {
-              spec[propName] = props[propName];
-            }
+      th: {
+        allowedRoles: true
+      },
+      tr: {
+        allowedRoles: true
+      },
+      track: {
+        allowedRoles: false,
+        noAriaAttrs: true
+      },
+      u: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      ul: {
+        contentTypes: [ 'flow' ],
+        allowedRoles: [ 'directory', 'group', 'listbox', 'menu', 'menubar', 'none', 'presentation', 'radiogroup', 'tablist', 'toolbar', 'tree' ]
+      },
+      var: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: true
+      },
+      video: {
+        variant: {
+          controls: {
+            matches: '[controls]',
+            contentTypes: [ 'interactive', 'embedded', 'phrasing', 'flow' ]
+          },
+          default: {
+            contentTypes: [ 'embedded', 'phrasing', 'flow' ]
           }
-        }
-      }
-      for (var _propName in variant['default']) {
-        if (variant['default'].hasOwnProperty(_propName) && typeof spec[_propName] === 'undefined') {
-          spec[_propName] = variant['default'][_propName];
-        }
-      }
-      return spec;
-    }
-    var get_element_spec_default = getElementSpec;
-    function implicitRole2(node) {
-      var _ref29 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, chromium = _ref29.chromium;
-      var vNode = node instanceof abstract_virtual_node_default ? node : get_node_from_tree_default(node);
-      node = vNode.actualNode;
-      if (!vNode) {
-        throw new ReferenceError('Cannot get implicit role of a node outside the current scope.');
-      }
-      var nodeName2 = vNode.props.nodeName;
-      var role = implicit_html_roles_default[nodeName2];
-      if (!role && chromium) {
-        var _get_element_spec_def = get_element_spec_default(vNode), chromiumRole = _get_element_spec_def.chromiumRole;
-        return chromiumRole || null;
-      }
-      if (typeof role === 'function') {
-        return role(vNode);
+        },
+        allowedRoles: [ 'application' ],
+        chromiumRole: 'video'
+      },
+      wbr: {
+        contentTypes: [ 'phrasing', 'flow' ],
+        allowedRoles: [ 'presentation', 'none' ]
       }
-      return role || null;
-    }
-    var implicit_role_default = implicitRole2;
-    var inheritsPresentationChain = {
-      td: [ 'tr' ],
-      th: [ 'tr' ],
-      tr: [ 'thead', 'tbody', 'tfoot', 'table' ],
-      thead: [ 'table' ],
-      tbody: [ 'table' ],
-      tfoot: [ 'table' ],
-      li: [ 'ol', 'ul' ],
-      dt: [ 'dl', 'div' ],
-      dd: [ 'dl', 'div' ],
-      div: [ 'dl' ]
     };
-    function getInheritedRole(vNode, explicitRoleOptions) {
-      var parentNodeNames = inheritsPresentationChain[vNode.props.nodeName];
-      if (!parentNodeNames) {
-        return null;
-      }
-      if (!vNode.parent) {
-        if (!vNode.actualNode) {
-          return null;
-        }
-        throw new ReferenceError('Cannot determine role presentational inheritance of a required parent outside the current scope.');
-      }
-      if (!parentNodeNames.includes(vNode.parent.props.nodeName)) {
-        return null;
-      }
-      var parentRole = get_explicit_role_default(vNode.parent, explicitRoleOptions);
-      if ([ 'none', 'presentation' ].includes(parentRole) && !hasConflictResolution(vNode.parent)) {
-        return parentRole;
-      }
-      if (parentRole) {
-        return null;
-      }
-      return getInheritedRole(vNode.parent, explicitRoleOptions);
-    }
-    function resolveImplicitRole(vNode, _ref30) {
-      var chromium = _ref30.chromium, explicitRoleOptions = _objectWithoutProperties(_ref30, _excluded6);
-      var implicitRole3 = implicit_role_default(vNode, {
-        chromium: chromium
+    var html_elms_default = htmlElms;
+    var cssColors = {
+      aliceblue: [ 240, 248, 255 ],
+      antiquewhite: [ 250, 235, 215 ],
+      aqua: [ 0, 255, 255 ],
+      aquamarine: [ 127, 255, 212 ],
+      azure: [ 240, 255, 255 ],
+      beige: [ 245, 245, 220 ],
+      bisque: [ 255, 228, 196 ],
+      black: [ 0, 0, 0 ],
+      blanchedalmond: [ 255, 235, 205 ],
+      blue: [ 0, 0, 255 ],
+      blueviolet: [ 138, 43, 226 ],
+      brown: [ 165, 42, 42 ],
+      burlywood: [ 222, 184, 135 ],
+      cadetblue: [ 95, 158, 160 ],
+      chartreuse: [ 127, 255, 0 ],
+      chocolate: [ 210, 105, 30 ],
+      coral: [ 255, 127, 80 ],
+      cornflowerblue: [ 100, 149, 237 ],
+      cornsilk: [ 255, 248, 220 ],
+      crimson: [ 220, 20, 60 ],
+      cyan: [ 0, 255, 255 ],
+      darkblue: [ 0, 0, 139 ],
+      darkcyan: [ 0, 139, 139 ],
+      darkgoldenrod: [ 184, 134, 11 ],
+      darkgray: [ 169, 169, 169 ],
+      darkgreen: [ 0, 100, 0 ],
+      darkgrey: [ 169, 169, 169 ],
+      darkkhaki: [ 189, 183, 107 ],
+      darkmagenta: [ 139, 0, 139 ],
+      darkolivegreen: [ 85, 107, 47 ],
+      darkorange: [ 255, 140, 0 ],
+      darkorchid: [ 153, 50, 204 ],
+      darkred: [ 139, 0, 0 ],
+      darksalmon: [ 233, 150, 122 ],
+      darkseagreen: [ 143, 188, 143 ],
+      darkslateblue: [ 72, 61, 139 ],
+      darkslategray: [ 47, 79, 79 ],
+      darkslategrey: [ 47, 79, 79 ],
+      darkturquoise: [ 0, 206, 209 ],
+      darkviolet: [ 148, 0, 211 ],
+      deeppink: [ 255, 20, 147 ],
+      deepskyblue: [ 0, 191, 255 ],
+      dimgray: [ 105, 105, 105 ],
+      dimgrey: [ 105, 105, 105 ],
+      dodgerblue: [ 30, 144, 255 ],
+      firebrick: [ 178, 34, 34 ],
+      floralwhite: [ 255, 250, 240 ],
+      forestgreen: [ 34, 139, 34 ],
+      fuchsia: [ 255, 0, 255 ],
+      gainsboro: [ 220, 220, 220 ],
+      ghostwhite: [ 248, 248, 255 ],
+      gold: [ 255, 215, 0 ],
+      goldenrod: [ 218, 165, 32 ],
+      gray: [ 128, 128, 128 ],
+      green: [ 0, 128, 0 ],
+      greenyellow: [ 173, 255, 47 ],
+      grey: [ 128, 128, 128 ],
+      honeydew: [ 240, 255, 240 ],
+      hotpink: [ 255, 105, 180 ],
+      indianred: [ 205, 92, 92 ],
+      indigo: [ 75, 0, 130 ],
+      ivory: [ 255, 255, 240 ],
+      khaki: [ 240, 230, 140 ],
+      lavender: [ 230, 230, 250 ],
+      lavenderblush: [ 255, 240, 245 ],
+      lawngreen: [ 124, 252, 0 ],
+      lemonchiffon: [ 255, 250, 205 ],
+      lightblue: [ 173, 216, 230 ],
+      lightcoral: [ 240, 128, 128 ],
+      lightcyan: [ 224, 255, 255 ],
+      lightgoldenrodyellow: [ 250, 250, 210 ],
+      lightgray: [ 211, 211, 211 ],
+      lightgreen: [ 144, 238, 144 ],
+      lightgrey: [ 211, 211, 211 ],
+      lightpink: [ 255, 182, 193 ],
+      lightsalmon: [ 255, 160, 122 ],
+      lightseagreen: [ 32, 178, 170 ],
+      lightskyblue: [ 135, 206, 250 ],
+      lightslategray: [ 119, 136, 153 ],
+      lightslategrey: [ 119, 136, 153 ],
+      lightsteelblue: [ 176, 196, 222 ],
+      lightyellow: [ 255, 255, 224 ],
+      lime: [ 0, 255, 0 ],
+      limegreen: [ 50, 205, 50 ],
+      linen: [ 250, 240, 230 ],
+      magenta: [ 255, 0, 255 ],
+      maroon: [ 128, 0, 0 ],
+      mediumaquamarine: [ 102, 205, 170 ],
+      mediumblue: [ 0, 0, 205 ],
+      mediumorchid: [ 186, 85, 211 ],
+      mediumpurple: [ 147, 112, 219 ],
+      mediumseagreen: [ 60, 179, 113 ],
+      mediumslateblue: [ 123, 104, 238 ],
+      mediumspringgreen: [ 0, 250, 154 ],
+      mediumturquoise: [ 72, 209, 204 ],
+      mediumvioletred: [ 199, 21, 133 ],
+      midnightblue: [ 25, 25, 112 ],
+      mintcream: [ 245, 255, 250 ],
+      mistyrose: [ 255, 228, 225 ],
+      moccasin: [ 255, 228, 181 ],
+      navajowhite: [ 255, 222, 173 ],
+      navy: [ 0, 0, 128 ],
+      oldlace: [ 253, 245, 230 ],
+      olive: [ 128, 128, 0 ],
+      olivedrab: [ 107, 142, 35 ],
+      orange: [ 255, 165, 0 ],
+      orangered: [ 255, 69, 0 ],
+      orchid: [ 218, 112, 214 ],
+      palegoldenrod: [ 238, 232, 170 ],
+      palegreen: [ 152, 251, 152 ],
+      paleturquoise: [ 175, 238, 238 ],
+      palevioletred: [ 219, 112, 147 ],
+      papayawhip: [ 255, 239, 213 ],
+      peachpuff: [ 255, 218, 185 ],
+      peru: [ 205, 133, 63 ],
+      pink: [ 255, 192, 203 ],
+      plum: [ 221, 160, 221 ],
+      powderblue: [ 176, 224, 230 ],
+      purple: [ 128, 0, 128 ],
+      rebeccapurple: [ 102, 51, 153 ],
+      red: [ 255, 0, 0 ],
+      rosybrown: [ 188, 143, 143 ],
+      royalblue: [ 65, 105, 225 ],
+      saddlebrown: [ 139, 69, 19 ],
+      salmon: [ 250, 128, 114 ],
+      sandybrown: [ 244, 164, 96 ],
+      seagreen: [ 46, 139, 87 ],
+      seashell: [ 255, 245, 238 ],
+      sienna: [ 160, 82, 45 ],
+      silver: [ 192, 192, 192 ],
+      skyblue: [ 135, 206, 235 ],
+      slateblue: [ 106, 90, 205 ],
+      slategray: [ 112, 128, 144 ],
+      slategrey: [ 112, 128, 144 ],
+      snow: [ 255, 250, 250 ],
+      springgreen: [ 0, 255, 127 ],
+      steelblue: [ 70, 130, 180 ],
+      tan: [ 210, 180, 140 ],
+      teal: [ 0, 128, 128 ],
+      thistle: [ 216, 191, 216 ],
+      tomato: [ 255, 99, 71 ],
+      turquoise: [ 64, 224, 208 ],
+      violet: [ 238, 130, 238 ],
+      wheat: [ 245, 222, 179 ],
+      white: [ 255, 255, 255 ],
+      whitesmoke: [ 245, 245, 245 ],
+      yellow: [ 255, 255, 0 ],
+      yellowgreen: [ 154, 205, 50 ]
+    };
+    var css_colors_default = cssColors;
+    var originals = {
+      ariaAttrs: aria_attrs_default,
+      ariaRoles: _extends({}, aria_roles_default, dpub_roles_default, graphics_roles_default),
+      htmlElms: html_elms_default,
+      cssColors: css_colors_default
+    };
+    var standards = _extends({}, originals);
+    function configureStandards(config) {
+      Object.keys(standards).forEach(function(propName) {
+        if (config[propName]) {
+          standards[propName] = deep_merge_default(standards[propName], config[propName]);
+        }
       });
-      if (!implicitRole3) {
-        return null;
-      }
-      var presentationalRole = getInheritedRole(vNode, explicitRoleOptions);
-      if (presentationalRole) {
-        return presentationalRole;
-      }
-      return implicitRole3;
     }
-    function hasConflictResolution(vNode) {
-      var hasGlobalAria = get_global_aria_attrs_default().some(function(attr) {
-        return vNode.hasAttr(attr);
+    function resetStandards() {
+      Object.keys(standards).forEach(function(propName) {
+        standards[propName] = originals[propName];
       });
-      return hasGlobalAria || _isFocusable(vNode);
-    }
-    function resolveRole(node) {
-      var _ref31 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var noImplicit = _ref31.noImplicit, roleOptions = _objectWithoutProperties(_ref31, _excluded7);
-      var _nodeLookup10 = _nodeLookup(node), vNode = _nodeLookup10.vNode;
-      if (vNode.props.nodeType !== 1) {
-        return null;
-      }
-      var explicitRole2 = get_explicit_role_default(vNode, roleOptions);
-      if (!explicitRole2) {
-        return noImplicit ? null : resolveImplicitRole(vNode, roleOptions);
-      }
-      if (![ 'presentation', 'none' ].includes(explicitRole2)) {
-        return explicitRole2;
-      }
-      if (hasConflictResolution(vNode)) {
-        return noImplicit ? null : resolveImplicitRole(vNode, roleOptions);
-      }
-      return explicitRole2;
     }
-    function getRole(node) {
-      var _ref32 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var noPresentational = _ref32.noPresentational, options = _objectWithoutProperties(_ref32, _excluded8);
-      var role = resolveRole(node, options);
-      if (noPresentational && [ 'presentation', 'none' ].includes(role)) {
-        return null;
-      }
-      return role;
+    var standards_default = standards;
+    function isUnsupportedRole(role) {
+      var roleDefinition = standards_default.ariaRoles[role];
+      return roleDefinition ? !!roleDefinition.unsupported : false;
     }
-    var get_role_default = getRole;
-    var alwaysTitleElements = [ 'iframe' ];
-    function titleText(node) {
-      var _nodeLookup11 = _nodeLookup(node), vNode = _nodeLookup11.vNode;
-      if (vNode.props.nodeType !== 1 || !node.hasAttr('title')) {
-        return '';
-      }
-      if (!matches_default(vNode, alwaysTitleElements) && [ 'none', 'presentation' ].includes(get_role_default(vNode))) {
-        return '';
+    var is_unsupported_role_default = isUnsupportedRole;
+    function isValidRole(role) {
+      var _ref26 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, allowAbstract = _ref26.allowAbstract, _ref26$flagUnsupporte = _ref26.flagUnsupported, flagUnsupported = _ref26$flagUnsupporte === void 0 ? false : _ref26$flagUnsupporte;
+      var roleDefinition = standards_default.ariaRoles[role];
+      var isRoleUnsupported = is_unsupported_role_default(role);
+      if (!roleDefinition || flagUnsupported && isRoleUnsupported) {
+        return false;
       }
-      return vNode.attr('title');
+      return allowAbstract ? true : roleDefinition.type !== 'abstract';
     }
-    var title_text_default = titleText;
-    function namedFromContents(vNode) {
-      var _ref33 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, strict = _ref33.strict;
+    var is_valid_role_default = isValidRole;
+    function getExplicitRole(vNode) {
+      var _ref27 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, fallback = _ref27.fallback, abstracts = _ref27.abstracts, dpub = _ref27.dpub;
       vNode = vNode instanceof abstract_virtual_node_default ? vNode : get_node_from_tree_default(vNode);
       if (vNode.props.nodeType !== 1) {
-        return false;
-      }
-      var role = get_role_default(vNode);
-      var roleDef = standards_default.ariaRoles[role];
-      if (roleDef && roleDef.nameFromContent) {
-        return true;
+        return null;
       }
-      if (strict) {
+      var roleAttr = (vNode.attr('role') || '').trim().toLowerCase();
+      var roleList = fallback ? token_list_default(roleAttr) : [ roleAttr ];
+      var firstValidRole = roleList.find(function(role) {
+        if (!dpub && role.substr(0, 4) === 'doc-') {
+          return false;
+        }
+        return is_valid_role_default(role, {
+          allowAbstract: abstracts
+        });
+      });
+      return firstValidRole || null;
+    }
+    var get_explicit_role_default = getExplicitRole;
+    function getElementsByContentType(type2) {
+      return Object.keys(standards_default.htmlElms).filter(function(nodeName2) {
+        var elm = standards_default.htmlElms[nodeName2];
+        if (elm.contentTypes) {
+          return elm.contentTypes.includes(type2);
+        }
+        if (!elm.variant) {
+          return false;
+        }
+        if (elm.variant['default'] && elm.variant['default'].contentTypes) {
+          return elm.variant['default'].contentTypes.includes(type2);
+        }
         return false;
-      }
-      return !roleDef || [ 'presentation', 'none' ].includes(role);
+      });
     }
-    var named_from_contents_default = namedFromContents;
-    function getOwnedVirtual(virtualNode) {
-      var actualNode = virtualNode.actualNode, children = virtualNode.children;
-      if (!children) {
-        throw new Error('getOwnedVirtual requires a virtual node');
-      }
-      if (virtualNode.hasAttr('aria-owns')) {
-        var owns = idrefs_default(actualNode, 'aria-owns').filter(function(element) {
-          return !!element;
-        }).map(function(element) {
-          return axe.utils.getNodeFromTree(element);
+    var get_elements_by_content_type_default = getElementsByContentType;
+    function getGlobalAriaAttrs() {
+      return cache_default.get('globalAriaAttrs', function() {
+        return Object.keys(standards_default.ariaAttrs).filter(function(attrName) {
+          return standards_default.ariaAttrs[attrName].global;
         });
-        return [].concat(_toConsumableArray(children), _toConsumableArray(owns));
-      }
-      return _toConsumableArray(children);
+      });
     }
-    var get_owned_virtual_default = getOwnedVirtual;
-    var unsupported_default = {
-      accessibleNameFromFieldValue: [ 'progressbar' ]
-    };
-    function _isVisibleToScreenReaders(vNode) {
-      vNode = _nodeLookup(vNode).vNode;
-      return isVisibleToScreenReadersVirtual(vNode);
+    var get_global_aria_attrs_default = getGlobalAriaAttrs;
+    function toGrid(node) {
+      var table = [];
+      var rows = node.rows;
+      for (var i = 0, rowLength = rows.length; i < rowLength; i++) {
+        var cells = rows[i].cells;
+        table[i] = table[i] || [];
+        var columnIndex = 0;
+        for (var j = 0, cellLength = cells.length; j < cellLength; j++) {
+          for (var colSpan = 0; colSpan < cells[j].colSpan; colSpan++) {
+            var rowspanAttr = cells[j].getAttribute('rowspan');
+            var rowspanValue = parseInt(rowspanAttr) === 0 || cells[j].rowspan === 0 ? rows.length : cells[j].rowSpan;
+            for (var rowSpan = 0; rowSpan < rowspanValue; rowSpan++) {
+              table[i + rowSpan] = table[i + rowSpan] || [];
+              while (table[i + rowSpan][columnIndex]) {
+                columnIndex++;
+              }
+              table[i + rowSpan][columnIndex] = cells[j];
+            }
+            columnIndex++;
+          }
+        }
+      }
+      return table;
     }
-    var isVisibleToScreenReadersVirtual = memoize_default(function isVisibleToScreenReadersMemoized(vNode, isAncestor) {
-      if (ariaHidden(vNode) || _isInert(vNode, {
-        skipAncestors: true,
-        isAncestor: isAncestor
-      })) {
-        return false;
+    var to_grid_default = memoize_default(toGrid);
+    function getCellPosition(cell, tableGrid) {
+      var rowIndex, index;
+      if (!tableGrid) {
+        tableGrid = to_grid_default(find_up_default(cell, 'table'));
       }
-      if (vNode.actualNode && vNode.props.nodeName === 'area') {
-        return !areaHidden(vNode, isVisibleToScreenReadersVirtual);
+      for (rowIndex = 0; rowIndex < tableGrid.length; rowIndex++) {
+        if (tableGrid[rowIndex]) {
+          index = tableGrid[rowIndex].indexOf(cell);
+          if (index !== -1) {
+            return {
+              x: index,
+              y: rowIndex
+            };
+          }
+        }
       }
-      if (_isHiddenForEveryone(vNode, {
-        skipAncestors: true,
-        isAncestor: isAncestor
-      })) {
+    }
+    var get_cell_position_default = memoize_default(getCellPosition);
+    function _getScope(el) {
+      var _nodeLookup9 = _nodeLookup(el), vNode = _nodeLookup9.vNode, cell = _nodeLookup9.domNode;
+      var scope = vNode.attr('scope');
+      var role = vNode.attr('role');
+      if (![ 'td', 'th' ].includes(vNode.props.nodeName)) {
+        throw new TypeError('Expected TD or TH element');
+      }
+      if (role === 'columnheader') {
+        return 'col';
+      } else if (role === 'rowheader') {
+        return 'row';
+      } else if (scope === 'col' || scope === 'row') {
+        return scope;
+      } else if (vNode.props.nodeName !== 'th') {
         return false;
+      } else if (!vNode.actualNode) {
+        return 'auto';
       }
-      if (!vNode.parent) {
-        return true;
+      var tableGrid = to_grid_default(find_up_default(cell, 'table'));
+      var pos = get_cell_position_default(cell, tableGrid);
+      var headerRow = tableGrid[pos.y].every(function(node) {
+        return node.nodeName.toUpperCase() === 'TH';
+      });
+      if (headerRow) {
+        return 'col';
       }
-      return isVisibleToScreenReadersVirtual(vNode.parent, true);
-    });
-    function visibleVirtual(element, screenReader, noRecursing) {
-      var _nodeLookup12 = _nodeLookup(element), vNode = _nodeLookup12.vNode;
-      var visibleMethod = screenReader ? _isVisibleToScreenReaders : _isVisibleOnScreen;
-      var visible2 = !element.actualNode || element.actualNode && visibleMethod(element);
-      var result = vNode.children.map(function(child) {
-        var _child$props = child.props, nodeType = _child$props.nodeType, nodeValue = _child$props.nodeValue;
-        if (nodeType === 3) {
-          if (nodeValue && visible2) {
-            return nodeValue;
-          }
-        } else if (!noRecursing) {
-          return visibleVirtual(child, screenReader);
-        }
-      }).join('');
-      return sanitize_default(result);
-    }
-    var visible_virtual_default = visibleVirtual;
-    var nonTextInputTypes = [ 'button', 'checkbox', 'color', 'file', 'hidden', 'image', 'password', 'radio', 'reset', 'submit' ];
-    function isNativeTextbox(node) {
-      node = node instanceof abstract_virtual_node_default ? node : get_node_from_tree_default(node);
-      var nodeName2 = node.props.nodeName;
-      return nodeName2 === 'textarea' || nodeName2 === 'input' && !nonTextInputTypes.includes((node.attr('type') || '').toLowerCase());
-    }
-    var is_native_textbox_default = isNativeTextbox;
-    function isNativeSelect(node) {
-      node = node instanceof abstract_virtual_node_default ? node : get_node_from_tree_default(node);
-      var nodeName2 = node.props.nodeName;
-      return nodeName2 === 'select';
+      var headerCol = tableGrid.map(function(col) {
+        return col[pos.x];
+      }).every(function(node) {
+        return node && node.nodeName.toUpperCase() === 'TH';
+      });
+      if (headerCol) {
+        return 'row';
+      }
+      return 'auto';
     }
-    var is_native_select_default = isNativeSelect;
-    function isAriaTextbox(node) {
-      var role = get_explicit_role_default(node);
-      return role === 'textbox';
+    function isColumnHeader(element) {
+      return [ 'col', 'auto' ].indexOf(_getScope(element)) !== -1;
     }
-    var is_aria_textbox_default = isAriaTextbox;
-    function isAriaListbox(node) {
-      var role = get_explicit_role_default(node);
-      return role === 'listbox';
+    var is_column_header_default = isColumnHeader;
+    function isRowHeader(cell) {
+      return [ 'row', 'auto' ].includes(_getScope(cell));
     }
-    var is_aria_listbox_default = isAriaListbox;
-    function isAriaCombobox(node) {
-      var role = get_explicit_role_default(node);
-      return role === 'combobox';
+    var is_row_header_default = isRowHeader;
+    function sanitize(str) {
+      if (!str) {
+        return '';
+      }
+      return str.replace(/\r\n/g, '\n').replace(/\u00A0/g, ' ').replace(/[\s]{2,}/g, ' ').trim();
     }
-    var is_aria_combobox_default = isAriaCombobox;
-    var rangeRoles = [ 'progressbar', 'scrollbar', 'slider', 'spinbutton' ];
-    function isAriaRange(node) {
-      var role = get_explicit_role_default(node);
-      return rangeRoles.includes(role);
+    var sanitize_default = sanitize;
+    var getSectioningElementSelector = function getSectioningElementSelector() {
+      return cache_default.get('sectioningElementSelector', function() {
+        return get_elements_by_content_type_default('sectioning').map(function(nodeName2) {
+          return ''.concat(nodeName2, ':not([role])');
+        }).join(', ') + ' , main:not([role]), [role=article], [role=complementary], [role=main], [role=navigation], [role=region]';
+      });
+    };
+    function hasAccessibleName(vNode) {
+      var ariaLabelledby = sanitize_default(arialabelledby_text_default(vNode));
+      var ariaLabel = sanitize_default(_arialabelText(vNode));
+      return !!(ariaLabelledby || ariaLabel);
     }
-    var is_aria_range_default = isAriaRange;
-    var controlValueRoles = [ 'textbox', 'progressbar', 'scrollbar', 'slider', 'spinbutton', 'combobox', 'listbox' ];
-    var _formControlValueMethods = {
-      nativeTextboxValue: nativeTextboxValue,
-      nativeSelectValue: nativeSelectValue,
-      ariaTextboxValue: ariaTextboxValue,
-      ariaListboxValue: ariaListboxValue,
-      ariaComboboxValue: ariaComboboxValue,
-      ariaRangeValue: ariaRangeValue
+    var implicitHtmlRoles = {
+      a: function a(vNode) {
+        return vNode.hasAttr('href') ? 'link' : null;
+      },
+      area: function area(vNode) {
+        return vNode.hasAttr('href') ? 'link' : null;
+      },
+      article: 'article',
+      aside: 'complementary',
+      body: 'document',
+      button: 'button',
+      datalist: 'listbox',
+      dd: 'definition',
+      dfn: 'term',
+      details: 'group',
+      dialog: 'dialog',
+      dt: 'term',
+      fieldset: 'group',
+      figure: 'figure',
+      footer: function footer(vNode) {
+        var sectioningElement = closest_default(vNode, getSectioningElementSelector());
+        return !sectioningElement ? 'contentinfo' : null;
+      },
+      form: function form(vNode) {
+        return hasAccessibleName(vNode) ? 'form' : null;
+      },
+      h1: 'heading',
+      h2: 'heading',
+      h3: 'heading',
+      h4: 'heading',
+      h5: 'heading',
+      h6: 'heading',
+      header: function header(vNode) {
+        var sectioningElement = closest_default(vNode, getSectioningElementSelector());
+        return !sectioningElement ? 'banner' : null;
+      },
+      hr: 'separator',
+      img: function img(vNode) {
+        var emptyAlt = vNode.hasAttr('alt') && !vNode.attr('alt');
+        var hasGlobalAria = get_global_aria_attrs_default().find(function(attr) {
+          return vNode.hasAttr(attr);
+        });
+        return emptyAlt && !hasGlobalAria && !_isFocusable(vNode) ? 'presentation' : 'img';
+      },
+      input: function input(vNode) {
+        var suggestionsSourceElement;
+        if (vNode.hasAttr('list')) {
+          var listElement = idrefs_default(vNode.actualNode, 'list').filter(function(node) {
+            return !!node;
+          })[0];
+          suggestionsSourceElement = listElement && listElement.nodeName.toLowerCase() === 'datalist';
+        }
+        switch (vNode.props.type) {
+         case 'checkbox':
+          return 'checkbox';
+
+         case 'number':
+          return 'spinbutton';
+
+         case 'radio':
+          return 'radio';
+
+         case 'range':
+          return 'slider';
+
+         case 'search':
+          return !suggestionsSourceElement ? 'searchbox' : 'combobox';
+
+         case 'button':
+         case 'image':
+         case 'reset':
+         case 'submit':
+          return 'button';
+
+         case 'text':
+         case 'tel':
+         case 'url':
+         case 'email':
+         case '':
+          return !suggestionsSourceElement ? 'textbox' : 'combobox';
+
+         default:
+          return 'textbox';
+        }
+      },
+      li: 'listitem',
+      main: 'main',
+      math: 'math',
+      menu: 'list',
+      meter: 'meter',
+      nav: 'navigation',
+      ol: 'list',
+      optgroup: 'group',
+      option: 'option',
+      output: 'status',
+      progress: 'progressbar',
+      search: 'search',
+      section: function section(vNode) {
+        return hasAccessibleName(vNode) ? 'region' : null;
+      },
+      select: function select(vNode) {
+        return vNode.hasAttr('multiple') || parseInt(vNode.attr('size')) > 1 ? 'listbox' : 'combobox';
+      },
+      summary: 'button',
+      table: 'table',
+      tbody: 'rowgroup',
+      td: function td(vNode) {
+        var table = closest_default(vNode, 'table');
+        var role = get_explicit_role_default(table);
+        return [ 'grid', 'treegrid' ].includes(role) ? 'gridcell' : 'cell';
+      },
+      textarea: 'textbox',
+      tfoot: 'rowgroup',
+      th: function th(vNode) {
+        if (is_column_header_default(vNode)) {
+          return 'columnheader';
+        }
+        if (is_row_header_default(vNode)) {
+          return 'rowheader';
+        }
+      },
+      thead: 'rowgroup',
+      tr: 'row',
+      ul: 'list'
     };
-    function formControlValue(virtualNode) {
-      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var actualNode = virtualNode.actualNode;
-      var unsupportedRoles = unsupported_default.accessibleNameFromFieldValue || [];
-      var role = get_role_default(virtualNode);
-      if (context.startNode === virtualNode || !controlValueRoles.includes(role) || unsupportedRoles.includes(role)) {
-        return '';
+    var implicit_html_roles_default = implicitHtmlRoles;
+    function fromPrimative(someString, matcher) {
+      var matcherType = _typeof(matcher);
+      if (Array.isArray(matcher) && typeof someString !== 'undefined') {
+        return matcher.includes(someString);
       }
-      var valueMethods = Object.keys(_formControlValueMethods).map(function(name) {
-        return _formControlValueMethods[name];
-      });
-      var valueString = valueMethods.reduce(function(accName, step) {
-        return accName || step(virtualNode, context);
-      }, '');
-      if (context.debug) {
-        log_default(valueString || '{empty-value}', actualNode, context);
+      if (matcherType === 'function') {
+        return !!matcher(someString);
       }
-      return valueString;
-    }
-    function nativeTextboxValue(node) {
-      var _nodeLookup13 = _nodeLookup(node), vNode = _nodeLookup13.vNode;
-      if (is_native_textbox_default(vNode)) {
-        return vNode.props.value || '';
+      if (someString !== null && someString !== void 0) {
+        if (matcher instanceof RegExp) {
+          return matcher.test(someString);
+        }
+        if (/^\/.*\/$/.test(matcher)) {
+          var pattern = matcher.substring(1, matcher.length - 1);
+          return new RegExp(pattern).test(someString);
+        }
       }
-      return '';
+      return matcher === someString;
     }
-    function nativeSelectValue(node) {
-      var _nodeLookup14 = _nodeLookup(node), vNode = _nodeLookup14.vNode;
-      if (!is_native_select_default(vNode)) {
-        return '';
+    var from_primative_default = fromPrimative;
+    function hasAccessibleName2(vNode, matcher) {
+      return from_primative_default(!!_accessibleTextVirtual(vNode), matcher);
+    }
+    var has_accessible_name_default = hasAccessibleName2;
+    function fromFunction(getValue, matcher) {
+      var matcherType = _typeof(matcher);
+      if (matcherType !== 'object' || Array.isArray(matcher) || matcher instanceof RegExp) {
+        throw new Error('Expect matcher to be an object');
       }
-      var options = query_selector_all_default(vNode, 'option');
-      var selectedOptions = options.filter(function(option) {
-        return option.props.selected;
+      return Object.keys(matcher).every(function(propName) {
+        return from_primative_default(getValue(propName), matcher[propName]);
       });
-      if (!selectedOptions.length) {
-        selectedOptions.push(options[0]);
-      }
-      return selectedOptions.map(function(option) {
-        return visible_virtual_default(option);
-      }).join(' ') || '';
     }
-    function ariaTextboxValue(node) {
-      var _nodeLookup15 = _nodeLookup(node), vNode = _nodeLookup15.vNode, domNode = _nodeLookup15.domNode;
-      if (!is_aria_textbox_default(vNode)) {
-        return '';
-      }
-      if (!domNode || domNode && !_isHiddenForEveryone(domNode)) {
-        return visible_virtual_default(vNode, true);
-      } else {
-        return domNode.textContent;
-      }
+    var from_function_default = fromFunction;
+    function attributes(vNode, matcher) {
+      vNode = _nodeLookup(vNode).vNode;
+      return from_function_default(function(attrName) {
+        return vNode.attr(attrName);
+      }, matcher);
+    }
+    var attributes_default = attributes;
+    function condition(arg, matcher) {
+      return !!matcher(arg);
     }
-    function ariaListboxValue(node, context) {
-      var _nodeLookup16 = _nodeLookup(node), vNode = _nodeLookup16.vNode;
-      if (!is_aria_listbox_default(vNode)) {
-        return '';
-      }
-      var selected = get_owned_virtual_default(vNode).filter(function(owned) {
-        return get_role_default(owned) === 'option' && owned.attr('aria-selected') === 'true';
-      });
-      if (selected.length === 0) {
-        return '';
-      }
-      return _accessibleTextVirtual(selected[0], context);
+    function explicitRole(vNode, matcher) {
+      return from_primative_default(get_explicit_role_default(vNode), matcher);
     }
-    function ariaComboboxValue(node, context) {
-      var _nodeLookup17 = _nodeLookup(node), vNode = _nodeLookup17.vNode;
-      if (!is_aria_combobox_default(vNode)) {
-        return '';
-      }
-      var listbox = get_owned_virtual_default(vNode).filter(function(elm) {
-        return get_role_default(elm) === 'listbox';
-      })[0];
-      return listbox ? ariaListboxValue(listbox, context) : '';
+    var explicit_role_default = explicitRole;
+    function implicitRole(vNode, matcher) {
+      return from_primative_default(implicit_role_default(vNode), matcher);
     }
-    function ariaRangeValue(node) {
-      var _nodeLookup18 = _nodeLookup(node), vNode = _nodeLookup18.vNode;
-      if (!is_aria_range_default(vNode) || !vNode.hasAttr('aria-valuenow')) {
-        return '';
-      }
-      var valueNow = +vNode.attr('aria-valuenow');
-      return !isNaN(valueNow) ? String(valueNow) : '0';
+    var implicit_role_default2 = implicitRole;
+    function nodeName(vNode, matcher) {
+      vNode = _nodeLookup(vNode).vNode;
+      return from_primative_default(vNode.props.nodeName, matcher);
     }
-    var form_control_value_default = formControlValue;
-    function subtreeText(virtualNode) {
-      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var alreadyProcessed2 = _accessibleTextVirtual.alreadyProcessed;
-      context.startNode = context.startNode || virtualNode;
-      var _context = context, strict = _context.strict, inControlContext = _context.inControlContext, inLabelledByContext = _context.inLabelledByContext;
-      var role = get_role_default(virtualNode);
-      var _get_element_spec_def2 = get_element_spec_default(virtualNode, {
-        noMatchAccessibleName: true
-      }), contentTypes = _get_element_spec_def2.contentTypes;
-      if (alreadyProcessed2(virtualNode, context) || virtualNode.props.nodeType !== 1 || contentTypes !== null && contentTypes !== void 0 && contentTypes.includes('embedded') || controlValueRoles.includes(role)) {
-        return '';
-      }
-      if (!context.subtreeDescendant && !context.inLabelledByContext && !named_from_contents_default(virtualNode, {
-        strict: strict
-      })) {
-        return '';
+    var node_name_default = nodeName;
+    function properties(vNode, matcher) {
+      vNode = _nodeLookup(vNode).vNode;
+      return from_function_default(function(propName) {
+        return vNode.props[propName];
+      }, matcher);
+    }
+    var properties_default = properties;
+    function semanticRole(vNode, matcher) {
+      return from_primative_default(get_role_default(vNode), matcher);
+    }
+    var semantic_role_default = semanticRole;
+    var matchers = {
+      hasAccessibleName: has_accessible_name_default,
+      attributes: attributes_default,
+      condition: condition,
+      explicitRole: explicit_role_default,
+      implicitRole: implicit_role_default2,
+      nodeName: node_name_default,
+      properties: properties_default,
+      semanticRole: semantic_role_default
+    };
+    function fromDefinition(vNode, definition) {
+      vNode = _nodeLookup(vNode).vNode;
+      if (Array.isArray(definition)) {
+        return definition.some(function(definitionItem) {
+          return fromDefinition(vNode, definitionItem);
+        });
       }
-      if (!strict) {
-        var subtreeDescendant = !inControlContext && !inLabelledByContext;
-        context = _extends({
-          subtreeDescendant: subtreeDescendant
-        }, context);
+      if (typeof definition === 'string') {
+        return _matches(vNode, definition);
       }
-      return get_owned_virtual_default(virtualNode).reduce(function(contentText, child) {
-        return appendAccessibleText(contentText, child, context);
-      }, '');
+      return Object.keys(definition).every(function(matcherName) {
+        if (!matchers[matcherName]) {
+          throw new Error('Unknown matcher type "'.concat(matcherName, '"'));
+        }
+        var matchMethod = matchers[matcherName];
+        var matcher = definition[matcherName];
+        return matchMethod(vNode, matcher);
+      });
     }
-    var phrasingElements = get_elements_by_content_type_default('phrasing').concat([ '#text' ]);
-    function appendAccessibleText(contentText, virtualNode, context) {
-      var nodeName2 = virtualNode.props.nodeName;
-      var contentTextAdd = _accessibleTextVirtual(virtualNode, context);
-      if (!contentTextAdd) {
-        return contentText;
+    var from_definition_default = fromDefinition;
+    function matches2(vNode, definition) {
+      return from_definition_default(vNode, definition);
+    }
+    var matches_default = matches2;
+    matches_default.hasAccessibleName = has_accessible_name_default;
+    matches_default.attributes = attributes_default;
+    matches_default.condition = condition;
+    matches_default.explicitRole = explicit_role_default;
+    matches_default.fromDefinition = from_definition_default;
+    matches_default.fromFunction = from_function_default;
+    matches_default.fromPrimative = from_primative_default;
+    matches_default.implicitRole = implicit_role_default2;
+    matches_default.nodeName = node_name_default;
+    matches_default.properties = properties_default;
+    matches_default.semanticRole = semantic_role_default;
+    var matches_default2 = matches_default;
+    function getElementSpec(vNode) {
+      var _ref28 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref28$noMatchAccessi = _ref28.noMatchAccessibleName, noMatchAccessibleName = _ref28$noMatchAccessi === void 0 ? false : _ref28$noMatchAccessi;
+      var standard = standards_default.htmlElms[vNode.props.nodeName];
+      if (!standard) {
+        return {};
       }
-      if (!phrasingElements.includes(nodeName2)) {
-        if (contentTextAdd[0] !== ' ') {
-          contentTextAdd += ' ';
+      if (!standard.variant) {
+        return standard;
+      }
+      var variant = standard.variant, spec = _objectWithoutProperties(standard, _excluded4);
+      for (var variantName in variant) {
+        if (!variant.hasOwnProperty(variantName) || variantName === 'default') {
+          continue;
         }
-        if (contentText && contentText[contentText.length - 1] !== ' ') {
-          contentTextAdd = ' ' + contentTextAdd;
+        var _variant$variantName = variant[variantName], matches4 = _variant$variantName.matches, props = _objectWithoutProperties(_variant$variantName, _excluded5);
+        var matchProperties = Array.isArray(matches4) ? matches4 : [ matches4 ];
+        for (var _i9 = 0; _i9 < matchProperties.length && noMatchAccessibleName; _i9++) {
+          if (matchProperties[_i9].hasOwnProperty('hasAccessibleName')) {
+            return standard;
+          }
+        }
+        if (matches_default2(vNode, matches4)) {
+          for (var propName in props) {
+            if (props.hasOwnProperty(propName)) {
+              spec[propName] = props[propName];
+            }
+          }
         }
       }
-      return contentText + contentTextAdd;
-    }
-    var subtree_text_default = subtreeText;
-    function labelText(virtualNode) {
-      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var alreadyProcessed2 = _accessibleTextVirtual.alreadyProcessed;
-      if (context.inControlContext || context.inLabelledByContext || alreadyProcessed2(virtualNode, context)) {
-        return '';
-      }
-      if (!context.startNode) {
-        context.startNode = virtualNode;
-      }
-      var labelContext = _extends({
-        inControlContext: true
-      }, context);
-      var explicitLabels = getExplicitLabels(virtualNode);
-      var implicitLabel = closest_default(virtualNode, 'label');
-      var labels;
-      if (implicitLabel) {
-        labels = [].concat(_toConsumableArray(explicitLabels), [ implicitLabel.actualNode ]);
-        labels.sort(node_sorter_default);
-      } else {
-        labels = explicitLabels;
+      for (var _propName in variant['default']) {
+        if (variant['default'].hasOwnProperty(_propName) && typeof spec[_propName] === 'undefined') {
+          spec[_propName] = variant['default'][_propName];
+        }
       }
-      return labels.map(function(label3) {
-        return accessible_text_default(label3, labelContext);
-      }).filter(function(text) {
-        return text !== '';
-      }).join(' ');
+      return spec;
     }
-    function getExplicitLabels(virtualNode) {
-      if (!virtualNode.attr('id')) {
-        return [];
+    var get_element_spec_default = getElementSpec;
+    function implicitRole2(node) {
+      var _ref29 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, chromium = _ref29.chromium;
+      var vNode = node instanceof abstract_virtual_node_default ? node : get_node_from_tree_default(node);
+      node = vNode.actualNode;
+      if (!vNode) {
+        throw new ReferenceError('Cannot get implicit role of a node outside the current scope.');
       }
-      if (!virtualNode.actualNode) {
-        throw new TypeError('Cannot resolve explicit label reference for non-DOM nodes');
+      var nodeName2 = vNode.props.nodeName;
+      var role = implicit_html_roles_default[nodeName2];
+      if (!role && chromium) {
+        var _get_element_spec_def = get_element_spec_default(vNode), chromiumRole = _get_element_spec_def.chromiumRole;
+        return chromiumRole || null;
       }
-      return find_elms_in_context_default({
-        elm: 'label',
-        attr: 'for',
-        value: virtualNode.attr('id'),
-        context: virtualNode.actualNode
-      });
+      if (typeof role === 'function') {
+        return role(vNode);
+      }
+      return role || null;
     }
-    var label_text_default = labelText;
-    var defaultButtonValues = {
-      submit: 'Submit',
-      image: 'Submit',
-      reset: 'Reset',
-      button: ''
-    };
-    var nativeTextMethods = {
-      valueText: function valueText(_ref34) {
-        var actualNode = _ref34.actualNode;
-        return actualNode.value || '';
-      },
-      buttonDefaultText: function buttonDefaultText(_ref35) {
-        var actualNode = _ref35.actualNode;
-        return defaultButtonValues[actualNode.type] || '';
-      },
-      tableCaptionText: descendantText.bind(null, 'caption'),
-      figureText: descendantText.bind(null, 'figcaption'),
-      svgTitleText: descendantText.bind(null, 'title'),
-      fieldsetLegendText: descendantText.bind(null, 'legend'),
-      altText: attrText.bind(null, 'alt'),
-      tableSummaryText: attrText.bind(null, 'summary'),
-      titleText: title_text_default,
-      subtreeText: subtree_text_default,
-      labelText: label_text_default,
-      singleSpace: function singleSpace() {
-        return ' ';
-      },
-      placeholderText: attrText.bind(null, 'placeholder')
+    var implicit_role_default = implicitRole2;
+    var inheritsPresentationChain = {
+      td: [ 'tr' ],
+      th: [ 'tr' ],
+      tr: [ 'thead', 'tbody', 'tfoot', 'table' ],
+      thead: [ 'table' ],
+      tbody: [ 'table' ],
+      tfoot: [ 'table' ],
+      li: [ 'ol', 'ul' ],
+      dt: [ 'dl', 'div' ],
+      dd: [ 'dl', 'div' ],
+      div: [ 'dl' ]
     };
-    function attrText(attr, vNode) {
-      return vNode.attr(attr) || '';
-    }
-    function descendantText(nodeName2, _ref36, context) {
-      var actualNode = _ref36.actualNode;
-      nodeName2 = nodeName2.toLowerCase();
-      var nodeNames2 = [ nodeName2, actualNode.nodeName.toLowerCase() ].join(',');
-      var candidate = actualNode.querySelector(nodeNames2);
-      if (!candidate || candidate.nodeName.toLowerCase() !== nodeName2) {
-        return '';
+    function getInheritedRole(vNode, explicitRoleOptions) {
+      var parentNodeNames = inheritsPresentationChain[vNode.props.nodeName];
+      if (!parentNodeNames) {
+        return null;
       }
-      return accessible_text_default(candidate, context);
-    }
-    var native_text_methods_default = nativeTextMethods;
-    function _nativeTextAlternative(virtualNode) {
-      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var actualNode = virtualNode.actualNode;
-      if (virtualNode.props.nodeType !== 1 || [ 'presentation', 'none' ].includes(get_role_default(virtualNode))) {
-        return '';
+      if (!vNode.parent) {
+        if (!vNode.actualNode) {
+          return null;
+        }
+        throw new ReferenceError('Cannot determine role presentational inheritance of a required parent outside the current scope.');
       }
-      var textMethods = findTextMethods(virtualNode);
-      var accessibleName = textMethods.reduce(function(accName, step) {
-        return accName || step(virtualNode, context);
-      }, '');
-      if (context.debug) {
-        axe.log(accessibleName || '{empty-value}', actualNode, context);
+      if (!parentNodeNames.includes(vNode.parent.props.nodeName)) {
+        return null;
       }
-      return accessibleName;
+      var parentRole = get_explicit_role_default(vNode.parent, explicitRoleOptions);
+      if ([ 'none', 'presentation' ].includes(parentRole) && !hasConflictResolution(vNode.parent)) {
+        return parentRole;
+      }
+      if (parentRole) {
+        return null;
+      }
+      return getInheritedRole(vNode.parent, explicitRoleOptions);
     }
-    function findTextMethods(virtualNode) {
-      var elmSpec = get_element_spec_default(virtualNode, {
-        noMatchAccessibleName: true
-      });
-      var methods = elmSpec.namingMethods || [];
-      return methods.map(function(methodName) {
-        return native_text_methods_default[methodName];
+    function resolveImplicitRole(vNode, _ref30) {
+      var chromium = _ref30.chromium, explicitRoleOptions = _objectWithoutProperties(_ref30, _excluded6);
+      var implicitRole3 = implicit_role_default(vNode, {
+        chromium: chromium
       });
+      if (!implicitRole3) {
+        return null;
+      }
+      var presentationalRole = getInheritedRole(vNode, explicitRoleOptions);
+      if (presentationalRole) {
+        return presentationalRole;
+      }
+      return implicitRole3;
     }
-    function getUnicodeNonBmpRegExp() {
-      return /[\u1D00-\u1D7F\u1D80-\u1DBF\u1DC0-\u1DFF\u20A0-\u20CF\u20D0-\u20FF\u2100-\u214F\u2150-\u218F\u2190-\u21FF\u2200-\u22FF\u2300-\u23FF\u2400-\u243F\u2440-\u245F\u2460-\u24FF\u2500-\u257F\u2580-\u259F\u25A0-\u25FF\u2600-\u26FF\u2700-\u27BF\uE000-\uF8FF]/g;
+    function hasConflictResolution(vNode) {
+      var hasGlobalAria = get_global_aria_attrs_default().some(function(attr) {
+        return vNode.hasAttr(attr);
+      });
+      return hasGlobalAria || _isFocusable(vNode);
     }
-    function getPunctuationRegExp() {
-      return /[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&\xa3\xa2\xa5\xa7\u20ac()*+,\-.\/:;<=>?@\[\]^_`{|}~\xb1]/g;
+    function resolveRole(node) {
+      var _ref31 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var noImplicit = _ref31.noImplicit, roleOptions = _objectWithoutProperties(_ref31, _excluded7);
+      var _nodeLookup10 = _nodeLookup(node), vNode = _nodeLookup10.vNode;
+      if (vNode.props.nodeType !== 1) {
+        return null;
+      }
+      var explicitRole2 = get_explicit_role_default(vNode, roleOptions);
+      if (!explicitRole2) {
+        return noImplicit ? null : resolveImplicitRole(vNode, roleOptions);
+      }
+      if (![ 'presentation', 'none' ].includes(explicitRole2)) {
+        return explicitRole2;
+      }
+      if (hasConflictResolution(vNode)) {
+        return noImplicit ? null : resolveImplicitRole(vNode, roleOptions);
+      }
+      return explicitRole2;
     }
-    function getSupplementaryPrivateUseRegExp() {
-      return /[\uDB80-\uDBBF][\uDC00-\uDFFF]/g;
+    function getRole(node) {
+      var _ref32 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var noPresentational = _ref32.noPresentational, options = _objectWithoutProperties(_ref32, _excluded8);
+      var role = resolveRole(node, options);
+      if (noPresentational && [ 'presentation', 'none' ].includes(role)) {
+        return null;
+      }
+      return role;
     }
-    function getCategoryFormatRegExp() {
-      return /[\xAD\u0600-\u0605\u061C\u06DD\u070F\u08E2\u180E\u200B-\u200F\u202A-\u202E\u2060-\u2064\u2066-\u206F\uFEFF\uFFF9-\uFFFB]|\uD804[\uDCBD\uDCCD]|\uD80D[\uDC30-\uDC38]|\uD82F[\uDCA0-\uDCA3]|\uD834[\uDD73-\uDD7A]|\uDB40[\uDC01\uDC20-\uDC7F]/g;
+    var get_role_default = getRole;
+    var alwaysTitleElements = [ 'iframe' ];
+    function titleText(node) {
+      var _nodeLookup11 = _nodeLookup(node), vNode = _nodeLookup11.vNode;
+      if (vNode.props.nodeType !== 1 || !node.hasAttr('title')) {
+        return '';
+      }
+      if (!matches_default(vNode, alwaysTitleElements) && [ 'none', 'presentation' ].includes(get_role_default(vNode))) {
+        return '';
+      }
+      return vNode.attr('title');
     }
-    var emoji_regex_default = function emoji_regex_default() {
-      return /[#*0-9]\uFE0F?\u20E3|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26AA\u26B0\u26B1\u26BD\u26BE\u26C4\u26C8\u26CF\u26D1\u26E9\u26F0-\u26F5\u26F7\u26F8\u26FA\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2757\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B55\u3030\u303D\u3297\u3299]\uFE0F?|[\u261D\u270C\u270D](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?|[\u270A\u270B](?:\uD83C[\uDFFB-\uDFFF])?|[\u23E9-\u23EC\u23F0\u23F3\u25FD\u2693\u26A1\u26AB\u26C5\u26CE\u26D4\u26EA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2795-\u2797\u27B0\u27BF\u2B50]|\u26D3\uFE0F?(?:\u200D\uD83D\uDCA5)?|\u26F9(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|\u2764\uFE0F?(?:\u200D(?:\uD83D\uDD25|\uD83E\uDE79))?|\uD83C(?:[\uDC04\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]\uFE0F?|[\uDF85\uDFC2\uDFC7](?:\uD83C[\uDFFB-\uDFFF])?|[\uDFC4\uDFCA](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDFCB\uDFCC](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF43\uDF45-\uDF4A\uDF4C-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uDDE6\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF]|\uDDE7\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF]|\uDDE8\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF]|\uDDE9\uD83C[\uDDEA\uDDEC\uDDEF
\uDDF0\uDDF2\uDDF4\uDDFF]|\uDDEA\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA]|\uDDEB\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7]|\uDDEC\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE]|\uDDED\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA]|\uDDEE\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9]|\uDDEF\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5]|\uDDF0\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF]|\uDDF1\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE]|\uDDF2\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF]|\uDDF3\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF]|\uDDF4\uD83C\uDDF2|\uDDF5\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE]|\uDDF6\uD83C\uDDE6|\uDDF7\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC]|\uDDF8\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF]|\uDDF9\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF]|\uDDFA\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF]|\uDDFB\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA]|\uDDFC\uD83C[\uDDEB\uDDF8]|\uDDFD\uD83C\uDDF0|\uDDFE\uD83C[\uDDEA\uDDF9]|\uDDFF\uD83C[\uDDE6\uDDF2\uDDFC]|\uDF44(?:\u200D\uD83D\uDFEB)?|\uDF4B(?:\u200D\uD83D\uDFE9)?|\uDFC3(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDFF3\uFE0F?(?:\u200D(?:\u26A7\uFE0F?|\uD83C\uDF08))?|\uDFF4(?:\u200D\u2620\uFE0F?|\uDB40\uDC67\uDB40\uDC62\uDB40(?:\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDC73\uDB40\uDC63\uDB40\uDC74|\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F)?)|\uD83D(?:[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3]\uFE0F?|[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC](?:\uD83C[\uDFFB-\uDFFF])?|[\uDC6E\uDC70\uDC71\uDC7
3\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4\uDEB5](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD74\uDD90](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?|[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC25\uDC27-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE41\uDE43\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEDC-\uDEDF\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB\uDFF0]|\uDC08(?:\u200D\u2B1B)?|\uDC15(?:\u200D\uD83E\uDDBA)?|\uDC26(?:\u200D(?:\u2B1B|\uD83D\uDD25))?|\uDC3B(?:\u200D\u2744\uFE0F?)?|\uDC41\uFE0F?(?:\u200D\uD83D\uDDE8\uFE0F?)?|\uDC68(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDC68\uDC69]\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD
83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE])))?))?|\uDC69(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?[\uDC68\uDC69]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?|\uDC69\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?))|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\
uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFE])))?))?|\uDC6F(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDD75(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDE2E(?:\u200D\uD83D\uDCA8)?|\uDE35(?:\u200D\uD83D\uDCAB)?|\uDE36(?:\u200D\uD83C\uDF2B\uFE0F?)?|\uDE42(?:\u200D[\u2194\u2195]\uFE0F?)?|\uDEB6(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?)|\uD83E(?:[\uDD0C\uDD0F\uDD18-\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5\uDEC3-\uDEC5\uDEF0\uDEF2-\uDEF8](?:\uD83C[\uDFFB-\uDFFF])?|[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD\uDDCF\uDDD4\uDDD6-\uDDDD](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDDDE\uDDDF]
(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD0D\uDD0E\uDD10-\uDD17\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCC\uDDD0\uDDE0-\uDDFF\uDE70-\uDE7C\uDE80-\uDE88\uDE90-\uDEBD\uDEBF-\uDEC2\uDECE-\uDEDB\uDEE0-\uDEE8]|\uDD3C(?:\u200D[\u2640\u2642]\uFE0F?|\uD83C[\uDFFB-\uDFFF])?|\uDDCE(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDDD1(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1|\uDDD1\u200D\uD83E\uDDD2(?:\u200D\uD83E\uDDD2)?|\uDDD2(?:\u200D\uD83E\uDDD2)?))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\u
DFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?))?|\uDEF1(?:\uD83C(?:\uDFFB(?:\u200D\uD83E\uDEF2\uD83C[\uDFFC-\uDFFF])?|\uDFFC(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFD-\uDFFF])?|\uDFFD(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])?|\uDFFE(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFD\uDFFF])?|\uDFFF(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFE])?))?)/g;
-    };
-    function hasUnicode(str, options) {
-      var emoji = options.emoji, nonBmp = options.nonBmp, punctuations = options.punctuations;
-      var value = false;
-      if (emoji) {
-        value || (value = emoji_regex_default().test(str));
+    var title_text_default = titleText;
+    function namedFromContents(vNode) {
+      var _ref33 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, strict = _ref33.strict;
+      vNode = vNode instanceof abstract_virtual_node_default ? vNode : get_node_from_tree_default(vNode);
+      if (vNode.props.nodeType !== 1) {
+        return false;
       }
-      if (nonBmp) {
-        value || (value = getUnicodeNonBmpRegExp().test(str) || getSupplementaryPrivateUseRegExp().test(str) || getCategoryFormatRegExp().test(str));
+      var role = get_role_default(vNode);
+      var roleDef = standards_default.ariaRoles[role];
+      if (roleDef && roleDef.nameFromContent) {
+        return true;
       }
-      if (punctuations) {
-        value || (value = getPunctuationRegExp().test(str));
+      if (strict) {
+        return false;
       }
-      return value;
+      return !roleDef || [ 'presentation', 'none' ].includes(role);
     }
-    var has_unicode_default = hasUnicode;
-    function _isIconLigature(textVNode) {
-      var differenceThreshold = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : .15;
-      var occurrenceThreshold = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 3;
-      var nodeValue = textVNode.actualNode.nodeValue.trim();
-      if (!sanitize_default(nodeValue) || has_unicode_default(nodeValue, {
-        emoji: true,
-        nonBmp: true
-      })) {
-        return false;
+    var named_from_contents_default = namedFromContents;
+    function getOwnedVirtual(virtualNode) {
+      var actualNode = virtualNode.actualNode, children = virtualNode.children;
+      if (!children) {
+        throw new Error('getOwnedVirtual requires a virtual node');
       }
-      var canvasContext = cache_default.get('canvasContext', function() {
-        return document.createElement('canvas').getContext('2d', {
-          willReadFrequently: true
+      if (virtualNode.hasAttr('aria-owns')) {
+        var owns = idrefs_default(actualNode, 'aria-owns').filter(function(element) {
+          return !!element;
+        }).map(function(element) {
+          return axe.utils.getNodeFromTree(element);
         });
-      });
-      var canvas = canvasContext.canvas;
-      var fonts = cache_default.get('fonts', function() {
-        return {};
-      });
-      var style = window.getComputedStyle(textVNode.parent.actualNode);
-      var fontFamily = style.getPropertyValue('font-family');
-      if (!fonts[fontFamily]) {
-        fonts[fontFamily] = {
-          occurrences: 0,
-          numLigatures: 0
-        };
-      }
-      var font = fonts[fontFamily];
-      if (font.occurrences >= occurrenceThreshold) {
-        if (font.numLigatures / font.occurrences === 1) {
-          return true;
-        } else if (font.numLigatures === 0) {
-          return false;
-        }
+        return [].concat(_toConsumableArray(children), _toConsumableArray(owns));
       }
-      font.occurrences++;
-      var fontSize = 30;
-      var fontStyle = ''.concat(fontSize, 'px ').concat(fontFamily);
-      canvasContext.font = fontStyle;
-      var firstChar = nodeValue.charAt(0);
-      var width = canvasContext.measureText(firstChar).width;
-      if (width === 0) {
-        font.numLigatures++;
-        return true;
+      return _toConsumableArray(children);
+    }
+    var get_owned_virtual_default = getOwnedVirtual;
+    var unsupported_default = {
+      accessibleNameFromFieldValue: [ 'progressbar' ]
+    };
+    function _isVisibleToScreenReaders(vNode) {
+      vNode = _nodeLookup(vNode).vNode;
+      return isVisibleToScreenReadersVirtual(vNode);
+    }
+    var isVisibleToScreenReadersVirtual = memoize_default(function isVisibleToScreenReadersMemoized(vNode, isAncestor) {
+      if (ariaHidden(vNode) || _isInert(vNode, {
+        skipAncestors: true,
+        isAncestor: isAncestor
+      })) {
+        return false;
       }
-      if (width < 30) {
-        var diff = 30 / width;
-        width *= diff;
-        fontSize *= diff;
-        fontStyle = ''.concat(fontSize, 'px ').concat(fontFamily);
+      if (vNode.actualNode && vNode.props.nodeName === 'area') {
+        return !areaHidden(vNode, isVisibleToScreenReadersVirtual);
       }
-      canvas.width = width;
-      canvas.height = fontSize;
-      canvasContext.font = fontStyle;
-      canvasContext.textAlign = 'left';
-      canvasContext.textBaseline = 'top';
-      canvasContext.fillText(firstChar, 0, 0);
-      var compareData = new Uint32Array(canvasContext.getImageData(0, 0, width, fontSize).data.buffer);
-      if (!compareData.some(function(pixel) {
-        return pixel;
+      if (_isHiddenForEveryone(vNode, {
+        skipAncestors: true,
+        isAncestor: isAncestor
       })) {
-        font.numLigatures++;
-        return true;
+        return false;
       }
-      canvasContext.clearRect(0, 0, width, fontSize);
-      canvasContext.fillText(nodeValue, 0, 0);
-      var compareWith = new Uint32Array(canvasContext.getImageData(0, 0, width, fontSize).data.buffer);
-      var differences = compareData.reduce(function(diff, pixel, i) {
-        if (pixel === 0 && compareWith[i] === 0) {
-          return diff;
-        }
-        if (pixel !== 0 && compareWith[i] !== 0) {
-          return diff;
-        }
-        return ++diff;
-      }, 0);
-      var expectedWidth = nodeValue.split('').reduce(function(totalWidth, _char2) {
-        return totalWidth + canvasContext.measureText(_char2).width;
-      }, 0);
-      var actualWidth = canvasContext.measureText(nodeValue).width;
-      var pixelDifference = differences / compareData.length;
-      var sizeDifference = 1 - actualWidth / expectedWidth;
-      if (pixelDifference >= differenceThreshold && sizeDifference >= differenceThreshold) {
-        font.numLigatures++;
+      if (!vNode.parent) {
         return true;
       }
-      return false;
+      return isVisibleToScreenReadersVirtual(vNode.parent, true);
+    });
+    function visibleVirtual(element, screenReader, noRecursing) {
+      var _nodeLookup12 = _nodeLookup(element), vNode = _nodeLookup12.vNode;
+      var visibleMethod = screenReader ? _isVisibleToScreenReaders : _isVisibleOnScreen;
+      var visible2 = !element.actualNode || element.actualNode && visibleMethod(element);
+      var result = vNode.children.map(function(child) {
+        var _child$props = child.props, nodeType = _child$props.nodeType, nodeValue = _child$props.nodeValue;
+        if (nodeType === 3) {
+          if (nodeValue && visible2) {
+            return nodeValue;
+          }
+        } else if (!noRecursing) {
+          return visibleVirtual(child, screenReader);
+        }
+      }).join('');
+      return sanitize_default(result);
     }
-    function _accessibleTextVirtual(virtualNode) {
+    var visible_virtual_default = visibleVirtual;
+    var nonTextInputTypes = [ 'button', 'checkbox', 'color', 'file', 'hidden', 'image', 'password', 'radio', 'reset', 'submit' ];
+    function isNativeTextbox(node) {
+      node = node instanceof abstract_virtual_node_default ? node : get_node_from_tree_default(node);
+      var nodeName2 = node.props.nodeName;
+      return nodeName2 === 'textarea' || nodeName2 === 'input' && !nonTextInputTypes.includes((node.attr('type') || '').toLowerCase());
+    }
+    var is_native_textbox_default = isNativeTextbox;
+    function isNativeSelect(node) {
+      node = node instanceof abstract_virtual_node_default ? node : get_node_from_tree_default(node);
+      var nodeName2 = node.props.nodeName;
+      return nodeName2 === 'select';
+    }
+    var is_native_select_default = isNativeSelect;
+    function isAriaTextbox(node) {
+      var role = get_explicit_role_default(node);
+      return role === 'textbox';
+    }
+    var is_aria_textbox_default = isAriaTextbox;
+    function isAriaListbox(node) {
+      var role = get_explicit_role_default(node);
+      return role === 'listbox';
+    }
+    var is_aria_listbox_default = isAriaListbox;
+    function isAriaCombobox(node) {
+      var role = get_explicit_role_default(node);
+      return role === 'combobox';
+    }
+    var is_aria_combobox_default = isAriaCombobox;
+    var rangeRoles = [ 'progressbar', 'scrollbar', 'slider', 'spinbutton' ];
+    function isAriaRange(node) {
+      var role = get_explicit_role_default(node);
+      return rangeRoles.includes(role);
+    }
+    var is_aria_range_default = isAriaRange;
+    var controlValueRoles = [ 'textbox', 'progressbar', 'scrollbar', 'slider', 'spinbutton', 'combobox', 'listbox' ];
+    var _formControlValueMethods = {
+      nativeTextboxValue: nativeTextboxValue,
+      nativeSelectValue: nativeSelectValue,
+      ariaTextboxValue: ariaTextboxValue,
+      ariaListboxValue: ariaListboxValue,
+      ariaComboboxValue: ariaComboboxValue,
+      ariaRangeValue: ariaRangeValue
+    };
+    function formControlValue(virtualNode) {
       var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      context = prepareContext(virtualNode, context);
-      if (shouldIgnoreHidden(virtualNode, context)) {
-        return '';
-      }
-      if (shouldIgnoreIconLigature(virtualNode, context)) {
+      var actualNode = virtualNode.actualNode;
+      var unsupportedRoles = unsupported_default.accessibleNameFromFieldValue || [];
+      var role = get_role_default(virtualNode);
+      if (context.startNode === virtualNode || !controlValueRoles.includes(role) || unsupportedRoles.includes(role)) {
         return '';
       }
-      var computationSteps = [ arialabelledby_text_default, _arialabelText, _nativeTextAlternative, form_control_value_default, subtree_text_default, textNodeValue, title_text_default ];
-      var accessibleName = computationSteps.reduce(function(accName, step) {
-        if (context.startNode === virtualNode) {
-          accName = sanitize_default(accName);
-        }
-        if (accName !== '') {
-          return accName;
-        }
-        return step(virtualNode, context);
+      var valueMethods = Object.keys(_formControlValueMethods).map(function(name) {
+        return _formControlValueMethods[name];
+      });
+      var valueString = valueMethods.reduce(function(accName, step) {
+        return accName || step(virtualNode, context);
       }, '');
       if (context.debug) {
-        axe.log(accessibleName || '{empty-value}', virtualNode.actualNode, context);
+        log_default(valueString || '{empty-value}', actualNode, context);
       }
-      return accessibleName;
+      return valueString;
     }
-    function textNodeValue(virtualNode) {
-      if (virtualNode.props.nodeType !== 3) {
-        return '';
+    function nativeTextboxValue(node) {
+      var _nodeLookup13 = _nodeLookup(node), vNode = _nodeLookup13.vNode;
+      if (is_native_textbox_default(vNode)) {
+        return vNode.props.value || '';
       }
-      return virtualNode.props.nodeValue;
+      return '';
     }
-    function shouldIgnoreHidden(virtualNode, context) {
-      if (!virtualNode) {
-        return false;
-      }
-      if (virtualNode.props.nodeType !== 1 || context.includeHidden) {
-        return false;
+    function nativeSelectValue(node) {
+      var _nodeLookup14 = _nodeLookup(node), vNode = _nodeLookup14.vNode;
+      if (!is_native_select_default(vNode)) {
+        return '';
       }
-      return !_isVisibleToScreenReaders(virtualNode);
-    }
-    function shouldIgnoreIconLigature(virtualNode, context) {
-      var _context$occurrenceTh;
-      var ignoreIconLigature = context.ignoreIconLigature, pixelThreshold = context.pixelThreshold;
-      var occurrenceThreshold = (_context$occurrenceTh = context.occurrenceThreshold) !== null && _context$occurrenceTh !== void 0 ? _context$occurrenceTh : context.occuranceThreshold;
-      if (virtualNode.props.nodeType !== 3 || !ignoreIconLigature) {
-        return false;
+      var options = query_selector_all_default(vNode, 'option');
+      var selectedOptions = options.filter(function(option) {
+        return option.props.selected;
+      });
+      if (!selectedOptions.length) {
+        selectedOptions.push(options[0]);
       }
-      return _isIconLigature(virtualNode, pixelThreshold, occurrenceThreshold);
+      return selectedOptions.map(function(option) {
+        return visible_virtual_default(option);
+      }).join(' ') || '';
     }
-    function prepareContext(virtualNode, context) {
-      if (!context.startNode) {
-        context = _extends({
-          startNode: virtualNode
-        }, context);
+    function ariaTextboxValue(node) {
+      var _nodeLookup15 = _nodeLookup(node), vNode = _nodeLookup15.vNode, domNode = _nodeLookup15.domNode;
+      if (!is_aria_textbox_default(vNode)) {
+        return '';
       }
-      if (virtualNode.props.nodeType === 1 && context.inLabelledByContext && context.includeHidden === void 0) {
-        context = _extends({
-          includeHidden: !_isVisibleToScreenReaders(virtualNode)
-        }, context);
+      if (!domNode || domNode && !_isHiddenForEveryone(domNode)) {
+        return visible_virtual_default(vNode, true);
+      } else {
+        return domNode.textContent;
       }
-      return context;
     }
-    _accessibleTextVirtual.alreadyProcessed = function alreadyProcessed(virtualnode, context) {
-      context.processed = context.processed || [];
-      if (context.processed.includes(virtualnode)) {
-        return true;
-      }
-      context.processed.push(virtualnode);
-      return false;
-    };
-    function removeUnicode(str, options) {
-      var emoji = options.emoji, nonBmp = options.nonBmp, punctuations = options.punctuations;
-      if (emoji) {
-        str = str.replace(emoji_regex_default(), '');
-      }
-      if (nonBmp) {
-        str = str.replace(getUnicodeNonBmpRegExp(), '').replace(getSupplementaryPrivateUseRegExp(), '').replace(getCategoryFormatRegExp(), '');
+    function ariaListboxValue(node, context) {
+      var _nodeLookup16 = _nodeLookup(node), vNode = _nodeLookup16.vNode;
+      if (!is_aria_listbox_default(vNode)) {
+        return '';
       }
-      if (punctuations) {
-        str = str.replace(getPunctuationRegExp(), '');
+      var selected = get_owned_virtual_default(vNode).filter(function(owned) {
+        return get_role_default(owned) === 'option' && owned.attr('aria-selected') === 'true';
+      });
+      if (selected.length === 0) {
+        return '';
       }
-      return str;
+      return _accessibleTextVirtual(selected[0], context);
     }
-    var remove_unicode_default = removeUnicode;
-    function isHumanInterpretable(str) {
-      if (!str.length) {
-        return 0;
-      }
-      var alphaNumericIconMap = [ 'x', 'i' ];
-      if (alphaNumericIconMap.includes(str)) {
-        return 0;
+    function ariaComboboxValue(node, context) {
+      var _nodeLookup17 = _nodeLookup(node), vNode = _nodeLookup17.vNode;
+      if (!is_aria_combobox_default(vNode)) {
+        return '';
       }
-      var noUnicodeStr = remove_unicode_default(str, {
-        emoji: true,
-        nonBmp: true,
-        punctuations: true
-      });
-      if (!sanitize_default(noUnicodeStr)) {
-        return 0;
+      var listbox = get_owned_virtual_default(vNode).filter(function(elm) {
+        return get_role_default(elm) === 'listbox';
+      })[0];
+      return listbox ? ariaListboxValue(listbox, context) : '';
+    }
+    function ariaRangeValue(node) {
+      var _nodeLookup18 = _nodeLookup(node), vNode = _nodeLookup18.vNode;
+      if (!is_aria_range_default(vNode) || !vNode.hasAttr('aria-valuenow')) {
+        return '';
       }
-      return 1;
+      var valueNow = +vNode.attr('aria-valuenow');
+      return !isNaN(valueNow) ? String(valueNow) : '0';
     }
-    var is_human_interpretable_default = isHumanInterpretable;
-    var _autocomplete = {
-      stateTerms: [ 'on', 'off' ],
-      standaloneTerms: [ 'name', 'honorific-prefix', 'given-name', 'additional-name', 'family-name', 'honorific-suffix', 'nickname', 'username', 'new-password', 'current-password', 'organization-title', 'organization', 'street-address', 'address-line1', 'address-line2', 'address-line3', 'address-level4', 'address-level3', 'address-level2', 'address-level1', 'country', 'country-name', 'postal-code', 'cc-name', 'cc-given-name', 'cc-additional-name', 'cc-family-name', 'cc-number', 'cc-exp', 'cc-exp-month', 'cc-exp-year', 'cc-csc', 'cc-type', 'transaction-currency', 'transaction-amount', 'language', 'bday', 'bday-day', 'bday-month', 'bday-year', 'sex', 'url', 'photo', 'one-time-code' ],
-      qualifiers: [ 'home', 'work', 'mobile', 'fax', 'pager' ],
-      qualifiedTerms: [ 'tel', 'tel-country-code', 'tel-national', 'tel-area-code', 'tel-local', 'tel-local-prefix', 'tel-local-suffix', 'tel-extension', 'email', 'impp' ],
-      locations: [ 'billing', 'shipping' ]
-    };
-    function isValidAutocomplete(autocompleteValue) {
-      var _ref37 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref37$looseTyped = _ref37.looseTyped, looseTyped = _ref37$looseTyped === void 0 ? false : _ref37$looseTyped, _ref37$stateTerms = _ref37.stateTerms, stateTerms = _ref37$stateTerms === void 0 ? [] : _ref37$stateTerms, _ref37$locations = _ref37.locations, locations = _ref37$locations === void 0 ? [] : _ref37$locations, _ref37$qualifiers = _ref37.qualifiers, qualifiers = _ref37$qualifiers === void 0 ? [] : _ref37$qualifiers, _ref37$standaloneTerm = _ref37.standaloneTerms, standaloneTerms = _ref37$standaloneTerm === void 0 ? [] : _ref37$standaloneTerm, _ref37$qualifiedTerms = _ref37.qualifiedTerms, qualifiedTerms = _ref37$qualifiedTerms === void 0 ? [] : _ref37$qualifiedTerms;
-      autocompleteValue = autocompleteValue.toLowerCase().trim();
-      stateTerms = stateTerms.concat(_autocomplete.stateTerms);
-      if (stateTerms.includes(autocompleteValue) || autocompleteValue === '') {
-        return true;
+    var form_control_value_default = formControlValue;
+    function subtreeText(virtualNode) {
+      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var alreadyProcessed2 = _accessibleTextVirtual.alreadyProcessed;
+      context.startNode = context.startNode || virtualNode;
+      var _context = context, strict = _context.strict, inControlContext = _context.inControlContext, inLabelledByContext = _context.inLabelledByContext;
+      var role = get_role_default(virtualNode);
+      var _get_element_spec_def2 = get_element_spec_default(virtualNode, {
+        noMatchAccessibleName: true
+      }), contentTypes = _get_element_spec_def2.contentTypes;
+      if (alreadyProcessed2(virtualNode, context) || virtualNode.props.nodeType !== 1 || contentTypes !== null && contentTypes !== void 0 && contentTypes.includes('embedded') || controlValueRoles.includes(role)) {
+        return '';
       }
-      qualifiers = qualifiers.concat(_autocomplete.qualifiers);
-      locations = locations.concat(_autocomplete.locations);
-      standaloneTerms = standaloneTerms.concat(_autocomplete.standaloneTerms);
-      qualifiedTerms = qualifiedTerms.concat(_autocomplete.qualifiedTerms);
-      var autocompleteTerms = autocompleteValue.split(/\s+/g);
-      if (autocompleteTerms[autocompleteTerms.length - 1] === 'webauthn') {
-        autocompleteTerms.pop();
-        if (autocompleteTerms.length === 0) {
-          return false;
-        }
+      if (!context.subtreeDescendant && !context.inLabelledByContext && !named_from_contents_default(virtualNode, {
+        strict: strict
+      })) {
+        return '';
       }
-      if (!looseTyped) {
-        if (autocompleteTerms[0].length > 8 && autocompleteTerms[0].substr(0, 8) === 'section-') {
-          autocompleteTerms.shift();
-        }
-        if (locations.includes(autocompleteTerms[0])) {
-          autocompleteTerms.shift();
-        }
-        if (qualifiers.includes(autocompleteTerms[0])) {
-          autocompleteTerms.shift();
-          standaloneTerms = [];
-        }
-        if (autocompleteTerms.length !== 1) {
-          return false;
-        }
+      if (!strict) {
+        var subtreeDescendant = !inControlContext && !inLabelledByContext;
+        context = _extends({
+          subtreeDescendant: subtreeDescendant
+        }, context);
       }
-      var purposeTerm = autocompleteTerms[autocompleteTerms.length - 1];
-      return standaloneTerms.includes(purposeTerm) || qualifiedTerms.includes(purposeTerm);
+      return get_owned_virtual_default(virtualNode).reduce(function(contentText, child) {
+        return appendAccessibleText(contentText, child, context);
+      }, '');
     }
-    var is_valid_autocomplete_default = isValidAutocomplete;
-    function labelVirtual(virtualNode) {
-      var ref, candidate;
-      if (virtualNode.attr('aria-labelledby')) {
-        ref = idrefs_default(virtualNode.actualNode, 'aria-labelledby');
-        candidate = ref.map(function(thing) {
-          var vNode = get_node_from_tree_default(thing);
-          return vNode ? visible_virtual_default(vNode) : '';
-        }).join(' ').trim();
-        if (candidate) {
-          return candidate;
-        }
+    var phrasingElements = get_elements_by_content_type_default('phrasing').concat([ '#text' ]);
+    function appendAccessibleText(contentText, virtualNode, context) {
+      var nodeName2 = virtualNode.props.nodeName;
+      var contentTextAdd = _accessibleTextVirtual(virtualNode, context);
+      if (!contentTextAdd) {
+        return contentText;
       }
-      candidate = virtualNode.attr('aria-label');
-      if (candidate) {
-        candidate = sanitize_default(candidate);
-        if (candidate) {
-          return candidate;
+      if (!phrasingElements.includes(nodeName2)) {
+        if (contentTextAdd[0] !== ' ') {
+          contentTextAdd += ' ';
+        }
+        if (contentText && contentText[contentText.length - 1] !== ' ') {
+          contentTextAdd = ' ' + contentTextAdd;
         }
       }
-      return null;
-    }
-    var label_virtual_default = labelVirtual;
-    function visible(element, screenReader, noRecursing) {
-      element = get_node_from_tree_default(element);
-      return visible_virtual_default(element, screenReader, noRecursing);
+      return contentText + contentTextAdd;
     }
-    var visible_default = visible;
-    function labelVirtual2(virtualNode) {
-      var ref, candidate, doc;
-      candidate = label_virtual_default(virtualNode);
-      if (candidate) {
-        return candidate;
+    var subtree_text_default = subtreeText;
+    function labelText(virtualNode) {
+      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var alreadyProcessed2 = _accessibleTextVirtual.alreadyProcessed;
+      if (context.inControlContext || context.inLabelledByContext || alreadyProcessed2(virtualNode, context)) {
+        return '';
       }
-      if (virtualNode.attr('id')) {
-        if (!virtualNode.actualNode) {
-          throw new TypeError('Cannot resolve explicit label reference for non-DOM nodes');
-        }
-        var id = escape_selector_default(virtualNode.attr('id'));
-        doc = get_root_node_default2(virtualNode.actualNode);
-        ref = doc.querySelector('label[for="' + id + '"]');
-        candidate = ref && visible_default(ref, true);
-        if (candidate) {
-          return candidate;
-        }
+      if (!context.startNode) {
+        context.startNode = virtualNode;
       }
-      ref = closest_default(virtualNode, 'label');
-      candidate = ref && visible_virtual_default(ref, true);
-      if (candidate) {
-        return candidate;
+      var labelContext = _extends({
+        inControlContext: true
+      }, context);
+      var explicitLabels = getExplicitLabels(virtualNode);
+      var implicitLabel = closest_default(virtualNode, 'label');
+      var labels;
+      if (implicitLabel) {
+        labels = [].concat(_toConsumableArray(explicitLabels), [ implicitLabel.actualNode ]);
+        labels.sort(node_sorter_default);
+      } else {
+        labels = explicitLabels;
       }
-      return null;
+      return labels.map(function(label3) {
+        return accessible_text_default(label3, labelContext);
+      }).filter(function(text) {
+        return text !== '';
+      }).join(' ');
     }
-    var label_virtual_default2 = labelVirtual2;
-    function label(node) {
-      node = get_node_from_tree_default(node);
-      return label_virtual_default2(node);
+    function getExplicitLabels(virtualNode) {
+      if (!virtualNode.attr('id')) {
+        return [];
+      }
+      if (!virtualNode.actualNode) {
+        throw new TypeError('Cannot resolve explicit label reference for non-DOM nodes');
+      }
+      return find_elms_in_context_default({
+        elm: 'label',
+        attr: 'for',
+        value: virtualNode.attr('id'),
+        context: virtualNode.actualNode
+      });
     }
-    var label_default = label;
-    var nativeElementType = [ {
-      matches: [ {
-        nodeName: 'textarea'
-      }, {
-        nodeName: 'input',
-        properties: {
-          type: [ 'text', 'password', 'search', 'tel', 'email', 'url' ]
-        }
-      } ],
-      namingMethods: 'labelText'
-    }, {
-      matches: {
-        nodeName: 'input',
-        properties: {
-          type: [ 'button', 'submit', 'reset' ]
-        }
+    var label_text_default = labelText;
+    var defaultButtonValues = {
+      submit: 'Submit',
+      image: 'Submit',
+      reset: 'Reset',
+      button: ''
+    };
+    var nativeTextMethods = {
+      valueText: function valueText(_ref34) {
+        var actualNode = _ref34.actualNode;
+        return actualNode.value || '';
       },
-      namingMethods: [ 'valueText', 'titleText', 'buttonDefaultText' ]
-    }, {
-      matches: {
-        nodeName: 'input',
-        properties: {
-          type: 'image'
-        }
+      buttonDefaultText: function buttonDefaultText(_ref35) {
+        var actualNode = _ref35.actualNode;
+        return defaultButtonValues[actualNode.type] || '';
       },
-      namingMethods: [ 'altText', 'valueText', 'labelText', 'titleText', 'buttonDefaultText' ]
-    }, {
-      matches: 'button',
-      namingMethods: 'subtreeText'
-    }, {
-      matches: 'fieldset',
-      namingMethods: 'fieldsetLegendText'
-    }, {
-      matches: 'OUTPUT',
-      namingMethods: 'subtreeText'
-    }, {
-      matches: [ {
-        nodeName: 'select'
-      }, {
-        nodeName: 'input',
-        properties: {
-          type: /^(?!text|password|search|tel|email|url|button|submit|reset)/
-        }
-      } ],
-      namingMethods: 'labelText'
-    }, {
-      matches: 'summary',
-      namingMethods: 'subtreeText'
-    }, {
-      matches: 'figure',
-      namingMethods: [ 'figureText', 'titleText' ]
-    }, {
-      matches: 'img',
-      namingMethods: 'altText'
-    }, {
-      matches: 'table',
-      namingMethods: [ 'tableCaptionText', 'tableSummaryText' ]
-    }, {
-      matches: [ 'hr', 'br' ],
-      namingMethods: [ 'titleText', 'singleSpace' ]
-    } ];
-    var native_element_type_default = nativeElementType;
-    function visibleTextNodes(vNode) {
-      var parentVisible = _isVisibleOnScreen(vNode);
-      var nodes = [];
-      vNode.children.forEach(function(child) {
-        if (child.actualNode.nodeType === 3) {
-          if (parentVisible) {
-            nodes.push(child);
-          }
-        } else {
-          nodes = nodes.concat(visibleTextNodes(child));
-        }
-      });
-      return nodes;
-    }
-    var visible_text_nodes_default = visibleTextNodes;
-    var getVisibleChildTextRects = memoize_default(function getVisibleChildTextRectsMemoized(node) {
-      var vNode = get_node_from_tree_default(node);
-      var nodeRect = vNode.boundingClientRect;
-      var clientRects = [];
-      var overflowHiddenNodes = get_overflow_hidden_ancestors_default(vNode);
-      node.childNodes.forEach(function(textNode) {
-        if (textNode.nodeType !== 3 || sanitize_default(textNode.nodeValue) === '') {
-          return;
-        }
-        var contentRects = getContentRects(textNode);
-        if (isOutsideNodeBounds(contentRects, nodeRect)) {
-          return;
-        }
-        clientRects.push.apply(clientRects, _toConsumableArray(filterHiddenRects(contentRects, overflowHiddenNodes)));
-      });
-      return clientRects.length ? clientRects : filterHiddenRects([ nodeRect ], overflowHiddenNodes);
-    });
-    var get_visible_child_text_rects_default = getVisibleChildTextRects;
-    function getContentRects(node) {
-      var range2 = document.createRange();
-      range2.selectNodeContents(node);
-      return Array.from(range2.getClientRects());
-    }
-    function isOutsideNodeBounds(rects, nodeRect) {
-      return rects.some(function(rect) {
-        var centerPoint = _getRectCenter(rect);
-        return !_isPointInRect(centerPoint, nodeRect);
-      });
-    }
-    function filterHiddenRects(contentRects, overflowHiddenNodes) {
-      var visibleRects = [];
-      contentRects.forEach(function(contentRect) {
-        if (contentRect.width < 1 || contentRect.height < 1) {
-          return;
-        }
-        var visibleRect = overflowHiddenNodes.reduce(function(rect, overflowNode) {
-          return rect && _getIntersectionRect(rect, overflowNode.boundingClientRect);
-        }, contentRect);
-        if (visibleRect) {
-          visibleRects.push(visibleRect);
-        }
-      });
-      return visibleRects;
-    }
-    function getTextElementStack(node) {
-      _createGrid();
-      var vNode = get_node_from_tree_default(node);
-      var grid = vNode._grid;
-      if (!grid) {
-        return [];
+      tableCaptionText: descendantText.bind(null, 'caption'),
+      figureText: descendantText.bind(null, 'figcaption'),
+      svgTitleText: descendantText.bind(null, 'title'),
+      fieldsetLegendText: descendantText.bind(null, 'legend'),
+      altText: attrText.bind(null, 'alt'),
+      tableSummaryText: attrText.bind(null, 'summary'),
+      titleText: title_text_default,
+      subtreeText: subtree_text_default,
+      labelText: label_text_default,
+      singleSpace: function singleSpace() {
+        return ' ';
+      },
+      placeholderText: attrText.bind(null, 'placeholder')
+    };
+    function attrText(attr, vNode) {
+      return vNode.attr(attr) || '';
+    }
+    function descendantText(nodeName2, _ref36, context) {
+      var actualNode = _ref36.actualNode;
+      nodeName2 = nodeName2.toLowerCase();
+      var nodeNames2 = [ nodeName2, actualNode.nodeName.toLowerCase() ].join(',');
+      var candidate = actualNode.querySelector(nodeNames2);
+      if (!candidate || candidate.nodeName.toLowerCase() !== nodeName2) {
+        return '';
       }
-      var clientRects = get_visible_child_text_rects_default(node);
-      return clientRects.map(function(rect) {
-        return getRectStack(grid, rect);
-      });
+      return accessible_text_default(candidate, context);
     }
-    var get_text_element_stack_default = getTextElementStack;
-    var visualRoles = [ 'checkbox', 'img', 'meter', 'progressbar', 'scrollbar', 'radio', 'slider', 'spinbutton', 'textbox' ];
-    function isVisualContent(el) {
-      var _nodeLookup19 = _nodeLookup(el), vNode = _nodeLookup19.vNode;
-      var role = axe.commons.aria.getExplicitRole(vNode);
-      if (role) {
-        return visualRoles.indexOf(role) !== -1;
+    var native_text_methods_default = nativeTextMethods;
+    function _nativeTextAlternative(virtualNode) {
+      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var actualNode = virtualNode.actualNode;
+      if (virtualNode.props.nodeType !== 1 || [ 'presentation', 'none' ].includes(get_role_default(virtualNode))) {
+        return '';
       }
-      switch (vNode.props.nodeName) {
-       case 'img':
-       case 'iframe':
-       case 'object':
-       case 'video':
-       case 'audio':
-       case 'canvas':
-       case 'svg':
-       case 'math':
-       case 'button':
-       case 'select':
-       case 'textarea':
-       case 'keygen':
-       case 'progress':
-       case 'meter':
-        return true;
-
-       case 'input':
-        return vNode.props.type !== 'hidden';
-
-       default:
-        return false;
+      var textMethods = findTextMethods(virtualNode);
+      var accessibleName = textMethods.reduce(function(accName, step) {
+        return accName || step(virtualNode, context);
+      }, '');
+      if (context.debug) {
+        axe.log(accessibleName || '{empty-value}', actualNode, context);
       }
+      return accessibleName;
     }
-    var is_visual_content_default = isVisualContent;
-    var hiddenTextElms = [ 'head', 'title', 'template', 'script', 'style', 'iframe', 'object', 'video', 'audio', 'noscript' ];
-    function hasChildTextNodes(elm) {
-      if (hiddenTextElms.includes(elm.props.nodeName)) {
-        return false;
-      }
-      return elm.children.some(function(_ref38) {
-        var props = _ref38.props;
-        return props.nodeType === 3 && props.nodeValue.trim();
+    function findTextMethods(virtualNode) {
+      var elmSpec = get_element_spec_default(virtualNode, {
+        noMatchAccessibleName: true
       });
-    }
-    function hasContentVirtual(elm, noRecursion, ignoreAria) {
-      return hasChildTextNodes(elm) || is_visual_content_default(elm.actualNode) || !ignoreAria && !!label_virtual_default(elm) || !noRecursion && elm.children.some(function(child) {
-        return child.actualNode.nodeType === 1 && hasContentVirtual(child);
+      var methods = elmSpec.namingMethods || [];
+      return methods.map(function(methodName) {
+        return native_text_methods_default[methodName];
       });
     }
-    var has_content_virtual_default = hasContentVirtual;
-    function hasContent(elm, noRecursion, ignoreAria) {
-      elm = get_node_from_tree_default(elm);
-      return has_content_virtual_default(elm, noRecursion, ignoreAria);
+    function getUnicodeNonBmpRegExp() {
+      return /[\u1D00-\u1D7F\u1D80-\u1DBF\u1DC0-\u1DFF\u20A0-\u20CF\u20D0-\u20FF\u2100-\u214F\u2150-\u218F\u2190-\u21FF\u2200-\u22FF\u2300-\u23FF\u2400-\u243F\u2440-\u245F\u2460-\u24FF\u2500-\u257F\u2580-\u259F\u25A0-\u25FF\u2600-\u26FF\u2700-\u27BF\uE000-\uF8FF]/g;
     }
-    var has_content_default = hasContent;
-    function _hasLangText(virtualNode) {
-      if (typeof virtualNode.children === 'undefined' || hasChildTextNodes(virtualNode)) {
-        return true;
-      }
-      if (virtualNode.props.nodeType === 1 && is_visual_content_default(virtualNode)) {
-        return !!axe.commons.text.accessibleTextVirtual(virtualNode);
-      }
-      return virtualNode.children.some(function(child) {
-        return !child.attr('lang') && _hasLangText(child) && !_isHiddenForEveryone(child);
-      });
+    function getPunctuationRegExp() {
+      return /[\u2000-\u206F\u2E00-\u2E7F\\'!"#$%&\xa3\xa2\xa5\xa7\u20ac()*+,\-.\/:;<=>?@\[\]^_`{|}~\xb1]/g;
     }
-    function insertedIntoFocusOrder(el) {
-      var tabIndex = parseInt(el.getAttribute('tabindex'), 10);
-      return tabIndex > -1 && _isFocusable(el) && !is_natively_focusable_default(el);
+    function getSupplementaryPrivateUseRegExp() {
+      return /[\uDB80-\uDBBF][\uDC00-\uDFFF]/g;
     }
-    var inserted_into_focus_order_default = insertedIntoFocusOrder;
-    function isHiddenWithCSS(el, descendentVisibilityValue) {
-      var _nodeLookup20 = _nodeLookup(el), vNode = _nodeLookup20.vNode, domNode = _nodeLookup20.domNode;
-      if (!vNode) {
-        return _isHiddenWithCSS(domNode, descendentVisibilityValue);
+    function getCategoryFormatRegExp() {
+      return /[\xAD\u0600-\u0605\u061C\u06DD\u070F\u08E2\u180E\u200B-\u200F\u202A-\u202E\u2060-\u2064\u2066-\u206F\uFEFF\uFFF9-\uFFFB]|\uD804[\uDCBD\uDCCD]|\uD80D[\uDC30-\uDC38]|\uD82F[\uDCA0-\uDCA3]|\uD834[\uDD73-\uDD7A]|\uDB40[\uDC01\uDC20-\uDC7F]/g;
+    }
+    var emoji_regex_default = function emoji_regex_default() {
+      return /[#*0-9]\uFE0F?\u20E3|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26AA\u26B0\u26B1\u26BD\u26BE\u26C4\u26C8\u26CF\u26D1\u26E9\u26F0-\u26F5\u26F7\u26F8\u26FA\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2757\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B55\u3030\u303D\u3297\u3299]\uFE0F?|[\u261D\u270C\u270D](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?|[\u270A\u270B](?:\uD83C[\uDFFB-\uDFFF])?|[\u23E9-\u23EC\u23F0\u23F3\u25FD\u2693\u26A1\u26AB\u26C5\u26CE\u26D4\u26EA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2795-\u2797\u27B0\u27BF\u2B50]|\u26D3\uFE0F?(?:\u200D\uD83D\uDCA5)?|\u26F9(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|\u2764\uFE0F?(?:\u200D(?:\uD83D\uDD25|\uD83E\uDE79))?|\uD83C(?:[\uDC04\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]\uFE0F?|[\uDF85\uDFC2\uDFC7](?:\uD83C[\uDFFB-\uDFFF])?|[\uDFC4\uDFCA](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDFCB\uDFCC](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF43\uDF45-\uDF4A\uDF4C-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uDDE6\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF]|\uDDE7\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF]|\uDDE8\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF]|\uDDE9\uD83C[\uDDEA\uDDEC\uDDEF
\uDDF0\uDDF2\uDDF4\uDDFF]|\uDDEA\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA]|\uDDEB\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7]|\uDDEC\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE]|\uDDED\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA]|\uDDEE\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9]|\uDDEF\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5]|\uDDF0\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF]|\uDDF1\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE]|\uDDF2\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF]|\uDDF3\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF]|\uDDF4\uD83C\uDDF2|\uDDF5\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE]|\uDDF6\uD83C\uDDE6|\uDDF7\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC]|\uDDF8\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF]|\uDDF9\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF]|\uDDFA\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF]|\uDDFB\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA]|\uDDFC\uD83C[\uDDEB\uDDF8]|\uDDFD\uD83C\uDDF0|\uDDFE\uD83C[\uDDEA\uDDF9]|\uDDFF\uD83C[\uDDE6\uDDF2\uDDFC]|\uDF44(?:\u200D\uD83D\uDFEB)?|\uDF4B(?:\u200D\uD83D\uDFE9)?|\uDFC3(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDFF3\uFE0F?(?:\u200D(?:\u26A7\uFE0F?|\uD83C\uDF08))?|\uDFF4(?:\u200D\u2620\uFE0F?|\uDB40\uDC67\uDB40\uDC62\uDB40(?:\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDC73\uDB40\uDC63\uDB40\uDC74|\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F)?)|\uD83D(?:[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3]\uFE0F?|[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC](?:\uD83C[\uDFFB-\uDFFF])?|[\uDC6E\uDC70\uDC71\uDC7
3\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4\uDEB5](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD74\uDD90](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?|[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC25\uDC27-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE41\uDE43\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEDC-\uDEDF\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB\uDFF0]|\uDC08(?:\u200D\u2B1B)?|\uDC15(?:\u200D\uD83E\uDDBA)?|\uDC26(?:\u200D(?:\u2B1B|\uD83D\uDD25))?|\uDC3B(?:\u200D\u2744\uFE0F?)?|\uDC41\uFE0F?(?:\u200D\uD83D\uDDE8\uFE0F?)?|\uDC68(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDC68\uDC69]\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD
83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE])))?))?|\uDC69(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?[\uDC68\uDC69]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?|\uDC69\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?))|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\
uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFE])))?))?|\uDC6F(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDD75(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDE2E(?:\u200D\uD83D\uDCA8)?|\uDE35(?:\u200D\uD83D\uDCAB)?|\uDE36(?:\u200D\uD83C\uDF2B\uFE0F?)?|\uDE42(?:\u200D[\u2194\u2195]\uFE0F?)?|\uDEB6(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?)|\uD83E(?:[\uDD0C\uDD0F\uDD18-\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5\uDEC3-\uDEC5\uDEF0\uDEF2-\uDEF8](?:\uD83C[\uDFFB-\uDFFF])?|[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD\uDDCF\uDDD4\uDDD6-\uDDDD](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDDDE\uDDDF]
(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD0D\uDD0E\uDD10-\uDD17\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCC\uDDD0\uDDE0-\uDDFF\uDE70-\uDE7C\uDE80-\uDE88\uDE90-\uDEBD\uDEBF-\uDEC2\uDECE-\uDEDB\uDEE0-\uDEE8]|\uDD3C(?:\u200D[\u2640\u2642]\uFE0F?|\uD83C[\uDFFB-\uDFFF])?|\uDDCE(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDDD1(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1|\uDDD1\u200D\uD83E\uDDD2(?:\u200D\uD83E\uDDD2)?|\uDDD2(?:\u200D\uD83E\uDDD2)?))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\u
DFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?))?|\uDEF1(?:\uD83C(?:\uDFFB(?:\u200D\uD83E\uDEF2\uD83C[\uDFFC-\uDFFF])?|\uDFFC(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFD-\uDFFF])?|\uDFFD(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])?|\uDFFE(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFD\uDFFF])?|\uDFFF(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFE])?))?)/g;
+    };
+    function hasUnicode(str, options) {
+      var emoji = options.emoji, nonBmp = options.nonBmp, punctuations = options.punctuations;
+      var value = false;
+      if (emoji) {
+        value || (value = emoji_regex_default().test(str));
       }
-      if (vNode._isHiddenWithCSS === void 0) {
-        vNode._isHiddenWithCSS = _isHiddenWithCSS(domNode, descendentVisibilityValue);
+      if (nonBmp) {
+        value || (value = getUnicodeNonBmpRegExp().test(str) || getSupplementaryPrivateUseRegExp().test(str) || getCategoryFormatRegExp().test(str));
       }
-      return vNode._isHiddenWithCSS;
+      if (punctuations) {
+        value || (value = getPunctuationRegExp().test(str));
+      }
+      return value;
     }
-    function _isHiddenWithCSS(el, descendentVisibilityValue) {
-      if (el.nodeType === 9) {
+    var has_unicode_default = hasUnicode;
+    function _isIconLigature(textVNode) {
+      var differenceThreshold = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : .15;
+      var occurrenceThreshold = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 3;
+      var nodeValue = textVNode.actualNode.nodeValue.trim();
+      if (!sanitize_default(nodeValue) || has_unicode_default(nodeValue, {
+        emoji: true,
+        nonBmp: true
+      })) {
         return false;
       }
-      if (el.nodeType === 11) {
-        el = el.host;
-      }
-      if ([ 'STYLE', 'SCRIPT' ].includes(el.nodeName.toUpperCase())) {
-        return false;
+      var canvasContext = cache_default.get('canvasContext', function() {
+        return document.createElement('canvas').getContext('2d', {
+          willReadFrequently: true
+        });
+      });
+      var canvas = canvasContext.canvas;
+      var fonts = cache_default.get('fonts', function() {
+        return {};
+      });
+      var style = window.getComputedStyle(textVNode.parent.actualNode);
+      var fontFamily = style.getPropertyValue('font-family');
+      if (!fonts[fontFamily]) {
+        fonts[fontFamily] = {
+          occurrences: 0,
+          numLigatures: 0
+        };
       }
-      var style = window.getComputedStyle(el, null);
-      if (!style) {
-        throw new Error('Style does not exist for the given element.');
+      var font = fonts[fontFamily];
+      if (font.occurrences >= occurrenceThreshold) {
+        if (font.numLigatures / font.occurrences === 1) {
+          return true;
+        } else if (font.numLigatures === 0) {
+          return false;
+        }
       }
-      var displayValue = style.getPropertyValue('display');
-      if (displayValue === 'none') {
+      font.occurrences++;
+      var fontSize = 30;
+      var fontStyle = ''.concat(fontSize, 'px ').concat(fontFamily);
+      canvasContext.font = fontStyle;
+      var firstChar = nodeValue.charAt(0);
+      var width = canvasContext.measureText(firstChar).width;
+      if (width === 0) {
+        font.numLigatures++;
         return true;
       }
-      var HIDDEN_VISIBILITY_VALUES = [ 'hidden', 'collapse' ];
-      var visibilityValue = style.getPropertyValue('visibility');
-      if (HIDDEN_VISIBILITY_VALUES.includes(visibilityValue) && !descendentVisibilityValue) {
-        return true;
+      if (width < 30) {
+        var diff = 30 / width;
+        width *= diff;
+        fontSize *= diff;
+        fontStyle = ''.concat(fontSize, 'px ').concat(fontFamily);
       }
-      if (HIDDEN_VISIBILITY_VALUES.includes(visibilityValue) && descendentVisibilityValue && HIDDEN_VISIBILITY_VALUES.includes(descendentVisibilityValue)) {
+      canvas.width = width;
+      canvas.height = fontSize;
+      canvasContext.font = fontStyle;
+      canvasContext.textAlign = 'left';
+      canvasContext.textBaseline = 'top';
+      canvasContext.fillText(firstChar, 0, 0);
+      var compareData = new Uint32Array(canvasContext.getImageData(0, 0, width, fontSize).data.buffer);
+      if (!compareData.some(function(pixel) {
+        return pixel;
+      })) {
+        font.numLigatures++;
         return true;
       }
-      var parent = get_composed_parent_default(el);
-      if (parent && !HIDDEN_VISIBILITY_VALUES.includes(visibilityValue)) {
-        return isHiddenWithCSS(parent, visibilityValue);
+      canvasContext.clearRect(0, 0, width, fontSize);
+      canvasContext.fillText(nodeValue, 0, 0);
+      var compareWith = new Uint32Array(canvasContext.getImageData(0, 0, width, fontSize).data.buffer);
+      var differences = compareData.reduce(function(diff, pixel, i) {
+        if (pixel === 0 && compareWith[i] === 0) {
+          return diff;
+        }
+        if (pixel !== 0 && compareWith[i] !== 0) {
+          return diff;
+        }
+        return ++diff;
+      }, 0);
+      var expectedWidth = nodeValue.split('').reduce(function(totalWidth, _char2) {
+        return totalWidth + canvasContext.measureText(_char2).width;
+      }, 0);
+      var actualWidth = canvasContext.measureText(nodeValue).width;
+      var pixelDifference = differences / compareData.length;
+      var sizeDifference = 1 - actualWidth / expectedWidth;
+      if (pixelDifference >= differenceThreshold && sizeDifference >= differenceThreshold) {
+        font.numLigatures++;
+        return true;
       }
       return false;
     }
-    var is_hidden_with_css_default = isHiddenWithCSS;
-    function isHTML5(doc) {
-      var node = doc.doctype;
-      if (node === null) {
-        return false;
+    function _accessibleTextVirtual(virtualNode) {
+      var context = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      context = prepareContext(virtualNode, context);
+      if (shouldIgnoreHidden(virtualNode, context)) {
+        return '';
       }
-      return node.name === 'html' && !node.publicId && !node.systemId;
-    }
-    var is_html5_default = isHTML5;
-    function getRoleType(role) {
-      var _window3;
-      if (role instanceof abstract_virtual_node_default || (_window3 = window) !== null && _window3 !== void 0 && _window3.Node && role instanceof window.Node) {
-        role = axe.commons.aria.getRole(role);
+      if (shouldIgnoreIconLigature(virtualNode, context)) {
+        return '';
+      }
+      var computationSteps = [ arialabelledby_text_default, _arialabelText, _nativeTextAlternative, form_control_value_default, subtree_text_default, textNodeValue, title_text_default ];
+      var accessibleName = computationSteps.reduce(function(accName, step) {
+        if (context.startNode === virtualNode) {
+          accName = sanitize_default(accName);
+        }
+        if (accName !== '') {
+          return accName;
+        }
+        return step(virtualNode, context);
+      }, '');
+      if (context.debug) {
+        axe.log(accessibleName || '{empty-value}', virtualNode.actualNode, context);
       }
-      var roleDef = standards_default.ariaRoles[role];
-      return (roleDef === null || roleDef === void 0 ? void 0 : roleDef.type) || null;
+      return accessibleName;
     }
-    var get_role_type_default = getRoleType;
-    function walkDomNode(node, functor) {
-      if (functor(node.actualNode) !== false) {
-        node.children.forEach(function(child) {
-          return walkDomNode(child, functor);
-        });
+    function textNodeValue(virtualNode) {
+      if (virtualNode.props.nodeType !== 3) {
+        return '';
       }
+      return virtualNode.props.nodeValue;
     }
-    var blockLike = [ 'block', 'list-item', 'table', 'flex', 'grid', 'inline-block' ];
-    function isBlock(elm) {
-      var display2 = window.getComputedStyle(elm).getPropertyValue('display');
-      return blockLike.includes(display2) || display2.substr(0, 6) === 'table-';
-    }
-    function getBlockParent(node) {
-      var parentBlock = get_composed_parent_default(node);
-      while (parentBlock && !isBlock(parentBlock)) {
-        parentBlock = get_composed_parent_default(parentBlock);
+    function shouldIgnoreHidden(virtualNode, context) {
+      if (!virtualNode) {
+        return false;
       }
-      return get_node_from_tree_default(parentBlock);
-    }
-    function isInTextBlock(node, options) {
-      if (isBlock(node)) {
+      if (virtualNode.props.nodeType !== 1 || context.includeHidden) {
         return false;
       }
-      var virtualParent = getBlockParent(node);
-      var parentText = '';
-      var widgetText = '';
-      var inBrBlock = 0;
-      walkDomNode(virtualParent, function(currNode) {
-        if (inBrBlock === 2) {
-          return false;
-        }
-        if (currNode.nodeType === 3) {
-          parentText += currNode.nodeValue;
-        }
-        if (currNode.nodeType !== 1) {
-          return;
-        }
-        var nodeName2 = (currNode.nodeName || '').toUpperCase();
-        if (currNode === node) {
-          inBrBlock = 1;
-        }
-        if ([ 'BR', 'HR' ].includes(nodeName2)) {
-          if (inBrBlock === 0) {
-            parentText = '';
-            widgetText = '';
-          } else {
-            inBrBlock = 2;
-          }
-        } else if (currNode.style.display === 'none' || currNode.style.overflow === 'hidden' || ![ '', null, 'none' ].includes(currNode.style['float']) || ![ '', null, 'relative' ].includes(currNode.style.position)) {
-          return false;
-        } else if (get_role_type_default(currNode) === 'widget') {
-          widgetText += currNode.textContent;
-          return false;
-        }
-      });
-      parentText = sanitize_default(parentText);
-      if (options !== null && options !== void 0 && options.noLengthCompare) {
-        return parentText.length !== 0;
+      return !_isVisibleToScreenReaders(virtualNode);
+    }
+    function shouldIgnoreIconLigature(virtualNode, context) {
+      var _context$occurrenceTh;
+      var ignoreIconLigature = context.ignoreIconLigature, pixelThreshold = context.pixelThreshold;
+      var occurrenceThreshold = (_context$occurrenceTh = context.occurrenceThreshold) !== null && _context$occurrenceTh !== void 0 ? _context$occurrenceTh : context.occuranceThreshold;
+      if (virtualNode.props.nodeType !== 3 || !ignoreIconLigature) {
+        return false;
       }
-      widgetText = sanitize_default(widgetText);
-      return parentText.length > widgetText.length;
+      return _isIconLigature(virtualNode, pixelThreshold, occurrenceThreshold);
     }
-    var is_in_text_block_default = isInTextBlock;
-    function isModalOpen(options) {
-      options = options || {};
-      var modalPercent = options.modalPercent || .75;
-      if (cache_default.get('isModalOpen')) {
-        return cache_default.get('isModalOpen');
+    function prepareContext(virtualNode, context) {
+      if (!context.startNode) {
+        context = _extends({
+          startNode: virtualNode
+        }, context);
       }
-      var definiteModals = query_selector_all_filter_default(axe._tree[0], 'dialog, [role=dialog], [aria-modal=true]', _isVisibleOnScreen);
-      if (definiteModals.length) {
-        cache_default.set('isModalOpen', true);
+      if (virtualNode.props.nodeType === 1 && context.inLabelledByContext && context.includeHidden === void 0) {
+        context = _extends({
+          includeHidden: !_isVisibleToScreenReaders(virtualNode)
+        }, context);
+      }
+      return context;
+    }
+    _accessibleTextVirtual.alreadyProcessed = function alreadyProcessed(virtualnode, context) {
+      context.processed = context.processed || [];
+      if (context.processed.includes(virtualnode)) {
         return true;
       }
-      var viewport = get_viewport_size_default(window);
-      var percentWidth = viewport.width * modalPercent;
-      var percentHeight = viewport.height * modalPercent;
-      var x = (viewport.width - percentWidth) / 2;
-      var y = (viewport.height - percentHeight) / 2;
-      var points = [ {
-        x: x,
-        y: y
-      }, {
-        x: viewport.width - x,
-        y: y
-      }, {
-        x: viewport.width / 2,
-        y: viewport.height / 2
-      }, {
-        x: x,
-        y: viewport.height - y
-      }, {
-        x: viewport.width - x,
-        y: viewport.height - y
-      } ];
-      var stacks = points.map(function(point) {
-        return Array.from(document.elementsFromPoint(point.x, point.y));
-      });
-      var _loop4 = function _loop4() {
-        var modalElement = stacks[_i10].find(function(elm) {
-          var style = window.getComputedStyle(elm);
-          return parseInt(style.width, 10) >= percentWidth && parseInt(style.height, 10) >= percentHeight && style.getPropertyValue('pointer-events') !== 'none' && (style.position === 'absolute' || style.position === 'fixed');
-        });
-        if (modalElement && stacks.every(function(stack) {
-          return stack.includes(modalElement);
-        })) {
-          cache_default.set('isModalOpen', true);
-          return {
-            v: true
-          };
-        }
-      }, _ret2;
-      for (var _i10 = 0; _i10 < stacks.length; _i10++) {
-        _ret2 = _loop4();
-        if (_ret2) {
-          return _ret2.v;
-        }
+      context.processed.push(virtualnode);
+      return false;
+    };
+    function removeUnicode(str, options) {
+      var emoji = options.emoji, nonBmp = options.nonBmp, punctuations = options.punctuations;
+      if (emoji) {
+        str = str.replace(emoji_regex_default(), '');
       }
-      cache_default.set('isModalOpen', void 0);
-      return void 0;
+      if (nonBmp) {
+        str = str.replace(getUnicodeNonBmpRegExp(), '').replace(getSupplementaryPrivateUseRegExp(), '').replace(getCategoryFormatRegExp(), '');
+      }
+      if (punctuations) {
+        str = str.replace(getPunctuationRegExp(), '');
+      }
+      return str;
     }
-    var is_modal_open_default = isModalOpen;
-    function _isMultiline(domNode) {
-      var margin = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 2;
-      var range2 = domNode.ownerDocument.createRange();
-      range2.setStart(domNode, 0);
-      range2.setEnd(domNode, domNode.childNodes.length);
-      var lastLineEnd = 0;
-      var lineCount = 0;
-      var _iterator5 = _createForOfIteratorHelper(range2.getClientRects()), _step5;
-      try {
-        for (_iterator5.s(); !(_step5 = _iterator5.n()).done; ) {
-          var rect = _step5.value;
-          if (rect.height <= margin) {
-            continue;
-          }
-          if (lastLineEnd > rect.top + margin) {
-            lastLineEnd = Math.max(lastLineEnd, rect.bottom);
-          } else if (lineCount === 0) {
-            lastLineEnd = rect.bottom;
-            lineCount++;
-          } else {
-            return true;
-          }
-        }
-      } catch (err) {
-        _iterator5.e(err);
-      } finally {
-        _iterator5.f();
+    var remove_unicode_default = removeUnicode;
+    function isHumanInterpretable(str) {
+      if (isEmpty(str) || isNonDigitCharacter(str) || isSymbolicText(str) || isUnicodeOrPunctuation(str)) {
+        return 0;
       }
-      return false;
+      return 1;
     }
-    function isNode(element) {
-      return element instanceof window.Node;
+    function isEmpty(str) {
+      return sanitize_default(str).length === 0;
     }
-    var is_node_default = isNode;
-    var cacheKey = 'color.incompleteData';
-    var incompleteData = {
-      set: function set(key, reason) {
-        if (typeof key !== 'string') {
-          throw new Error('Incomplete data: key must be a string');
-        }
-        var data = cache_default.get(cacheKey, function() {
-          return {};
-        });
-        if (reason) {
-          data[key] = reason;
-        }
-        return data[key];
-      },
-      get: function get(key) {
-        var data = cache_default.get(cacheKey);
-        return data === null || data === void 0 ? void 0 : data[key];
-      },
-      clear: function clear() {
-        cache_default.set(cacheKey, {});
-      }
+    function isNonDigitCharacter(str) {
+      return str.length === 1 && str.match(/\D/);
+    }
+    function isSymbolicText(str) {
+      var symbolicText = [ 'aa', 'abc' ];
+      return symbolicText.includes(str.toLowerCase());
+    }
+    function isUnicodeOrPunctuation(str) {
+      var noUnicodeStr = remove_unicode_default(str, {
+        emoji: true,
+        nonBmp: true,
+        punctuations: true
+      });
+      return !sanitize_default(noUnicodeStr);
+    }
+    var is_human_interpretable_default = isHumanInterpretable;
+    var _autocomplete = {
+      stateTerms: [ 'on', 'off' ],
+      standaloneTerms: [ 'name', 'honorific-prefix', 'given-name', 'additional-name', 'family-name', 'honorific-suffix', 'nickname', 'username', 'new-password', 'current-password', 'organization-title', 'organization', 'street-address', 'address-line1', 'address-line2', 'address-line3', 'address-level4', 'address-level3', 'address-level2', 'address-level1', 'country', 'country-name', 'postal-code', 'cc-name', 'cc-given-name', 'cc-additional-name', 'cc-family-name', 'cc-number', 'cc-exp', 'cc-exp-month', 'cc-exp-year', 'cc-csc', 'cc-type', 'transaction-currency', 'transaction-amount', 'language', 'bday', 'bday-day', 'bday-month', 'bday-year', 'sex', 'url', 'photo', 'one-time-code' ],
+      qualifiers: [ 'home', 'work', 'mobile', 'fax', 'pager' ],
+      qualifiedTerms: [ 'tel', 'tel-country-code', 'tel-national', 'tel-area-code', 'tel-local', 'tel-local-prefix', 'tel-local-suffix', 'tel-extension', 'email', 'impp' ],
+      locations: [ 'billing', 'shipping' ]
     };
-    var incomplete_data_default = incompleteData;
-    function elementHasImage(elm, style) {
-      var graphicNodes = [ 'IMG', 'CANVAS', 'OBJECT', 'IFRAME', 'VIDEO', 'SVG' ];
-      var nodeName2 = elm.nodeName.toUpperCase();
-      if (graphicNodes.includes(nodeName2)) {
-        incomplete_data_default.set('bgColor', 'imgNode');
+    function isValidAutocomplete(autocompleteValue) {
+      var _ref37 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref37$looseTyped = _ref37.looseTyped, looseTyped = _ref37$looseTyped === void 0 ? false : _ref37$looseTyped, _ref37$stateTerms = _ref37.stateTerms, stateTerms = _ref37$stateTerms === void 0 ? [] : _ref37$stateTerms, _ref37$locations = _ref37.locations, locations = _ref37$locations === void 0 ? [] : _ref37$locations, _ref37$qualifiers = _ref37.qualifiers, qualifiers = _ref37$qualifiers === void 0 ? [] : _ref37$qualifiers, _ref37$standaloneTerm = _ref37.standaloneTerms, standaloneTerms = _ref37$standaloneTerm === void 0 ? [] : _ref37$standaloneTerm, _ref37$qualifiedTerms = _ref37.qualifiedTerms, qualifiedTerms = _ref37$qualifiedTerms === void 0 ? [] : _ref37$qualifiedTerms;
+      autocompleteValue = autocompleteValue.toLowerCase().trim();
+      stateTerms = stateTerms.concat(_autocomplete.stateTerms);
+      if (stateTerms.includes(autocompleteValue) || autocompleteValue === '') {
         return true;
       }
-      style = style || window.getComputedStyle(elm);
-      var bgImageStyle = style.getPropertyValue('background-image');
-      var hasBgImage = bgImageStyle !== 'none';
-      if (hasBgImage) {
-        var hasGradient = /gradient/.test(bgImageStyle);
-        incomplete_data_default.set('bgColor', hasGradient ? 'bgGradient' : 'bgImage');
+      qualifiers = qualifiers.concat(_autocomplete.qualifiers);
+      locations = locations.concat(_autocomplete.locations);
+      standaloneTerms = standaloneTerms.concat(_autocomplete.standaloneTerms);
+      qualifiedTerms = qualifiedTerms.concat(_autocomplete.qualifiedTerms);
+      var autocompleteTerms = autocompleteValue.split(/\s+/g);
+      if (autocompleteTerms[autocompleteTerms.length - 1] === 'webauthn') {
+        autocompleteTerms.pop();
+        if (autocompleteTerms.length === 0) {
+          return false;
+        }
+      }
+      if (!looseTyped) {
+        if (autocompleteTerms[0].length > 8 && autocompleteTerms[0].substr(0, 8) === 'section-') {
+          autocompleteTerms.shift();
+        }
+        if (locations.includes(autocompleteTerms[0])) {
+          autocompleteTerms.shift();
+        }
+        if (qualifiers.includes(autocompleteTerms[0])) {
+          autocompleteTerms.shift();
+          standaloneTerms = [];
+        }
+        if (autocompleteTerms.length !== 1) {
+          return false;
+        }
       }
-      return hasBgImage;
+      var purposeTerm = autocompleteTerms[autocompleteTerms.length - 1];
+      return standaloneTerms.includes(purposeTerm) || qualifiedTerms.includes(purposeTerm);
     }
-    var element_has_image_default = elementHasImage;
-    var imports_exports = {};
-    __export(imports_exports, {
-      Colorjs: function Colorjs() {
-        return Color;
-      },
-      CssSelectorParser: function CssSelectorParser() {
-        return import_css_selector_parser2.CssSelectorParser;
-      },
-      doT: function doT() {
-        return import_dot['default'];
-      },
-      emojiRegexText: function emojiRegexText() {
-        return emoji_regex_default;
-      },
-      memoize: function memoize() {
-        return import_memoizee2['default'];
+    var is_valid_autocomplete_default = isValidAutocomplete;
+    function labelVirtual(virtualNode) {
+      var ref, candidate;
+      if (virtualNode.attr('aria-labelledby')) {
+        ref = idrefs_default(virtualNode.actualNode, 'aria-labelledby');
+        candidate = ref.map(function(thing) {
+          var vNode = get_node_from_tree_default(thing);
+          return vNode ? visible_virtual_default(vNode) : '';
+        }).join(' ').trim();
+        if (candidate) {
+          return candidate;
+        }
       }
-    });
-    var import_es6_promise = __toModule(require_es6_promise());
-    var import_typedarray = __toModule(require_typedarray());
-    var import_weakmap_polyfill = __toModule(require_weakmap_polyfill());
-    var import_has_own = __toModule(require_has_own3());
-    var import_values = __toModule(require_values3());
-    if (!('hasOwn' in Object)) {
-      Object.hasOwn = import_has_own['default'];
-    }
-    if (!('values' in Object)) {
-      Object.values = import_values['default'];
-    }
-    if (!('Promise' in window)) {
-      import_es6_promise['default'].polyfill();
+      candidate = virtualNode.attr('aria-label');
+      if (candidate) {
+        candidate = sanitize_default(candidate);
+        if (candidate) {
+          return candidate;
+        }
+      }
+      return null;
     }
-    if (!('Uint32Array' in window)) {
-      window.Uint32Array = import_typedarray.Uint32Array;
+    var label_virtual_default = labelVirtual;
+    function visible(element, screenReader, noRecursing) {
+      element = get_node_from_tree_default(element);
+      return visible_virtual_default(element, screenReader, noRecursing);
     }
-    if (window.Uint32Array) {
-      if (!('some' in window.Uint32Array.prototype)) {
-        Object.defineProperty(window.Uint32Array.prototype, 'some', {
-          value: Array.prototype.some
-        });
+    var visible_default = visible;
+    function labelVirtual2(virtualNode) {
+      var ref, candidate, doc;
+      candidate = label_virtual_default(virtualNode);
+      if (candidate) {
+        return candidate;
       }
-      if (!('reduce' in window.Uint32Array.prototype)) {
-        Object.defineProperty(window.Uint32Array.prototype, 'reduce', {
-          value: Array.prototype.reduce
-        });
+      if (virtualNode.attr('id')) {
+        if (!virtualNode.actualNode) {
+          throw new TypeError('Cannot resolve explicit label reference for non-DOM nodes');
+        }
+        var id = escape_selector_default(virtualNode.attr('id'));
+        doc = get_root_node_default2(virtualNode.actualNode);
+        ref = doc.querySelector('label[for="' + id + '"]');
+        candidate = ref && visible_default(ref, true);
+        if (candidate) {
+          return candidate;
+        }
+      }
+      ref = closest_default(virtualNode, 'label');
+      candidate = ref && visible_virtual_default(ref, true);
+      if (candidate) {
+        return candidate;
       }
+      return null;
     }
-    if (typeof Object.assign !== 'function') {
-      (function() {
-        Object.assign = function(target) {
-          if (target === void 0 || target === null) {
-            throw new TypeError('Cannot convert undefined or null to object');
-          }
-          var output = Object(target);
-          for (var index = 1; index < arguments.length; index++) {
-            var source = arguments[index];
-            if (source !== void 0 && source !== null) {
-              for (var nextKey in source) {
-                if (source.hasOwnProperty(nextKey)) {
-                  output[nextKey] = source[nextKey];
-                }
-              }
-            }
-          }
-          return output;
-        };
-      })();
+    var label_virtual_default2 = labelVirtual2;
+    function label(node) {
+      node = get_node_from_tree_default(node);
+      return label_virtual_default2(node);
     }
-    if (!Array.prototype.find) {
-      Object.defineProperty(Array.prototype, 'find', {
-        value: function value(predicate) {
-          if (this === null) {
-            throw new TypeError('Array.prototype.find called on null or undefined');
-          }
-          if (typeof predicate !== 'function') {
-            throw new TypeError('predicate must be a function');
-          }
-          var list = Object(this);
-          var length = list.length >>> 0;
-          var thisArg = arguments[1];
-          var value;
-          for (var i = 0; i < length; i++) {
-            value = list[i];
-            if (predicate.call(thisArg, value, i, list)) {
-              return value;
-            }
-          }
-          return void 0;
+    var label_default = label;
+    var nativeElementType = [ {
+      matches: [ {
+        nodeName: 'textarea'
+      }, {
+        nodeName: 'input',
+        properties: {
+          type: [ 'text', 'password', 'search', 'tel', 'email', 'url' ]
         }
-      });
-    }
-    if (!Array.prototype.findIndex) {
-      Object.defineProperty(Array.prototype, 'findIndex', {
-        value: function value(predicate, thisArg) {
-          if (this === null) {
-            throw new TypeError('Array.prototype.find called on null or undefined');
-          }
-          if (typeof predicate !== 'function') {
-            throw new TypeError('predicate must be a function');
-          }
-          var list = Object(this);
-          var length = list.length >>> 0;
-          var value;
-          for (var i = 0; i < length; i++) {
-            value = list[i];
-            if (predicate.call(thisArg, value, i, list)) {
-              return i;
-            }
-          }
-          return -1;
+      } ],
+      namingMethods: 'labelText'
+    }, {
+      matches: {
+        nodeName: 'input',
+        properties: {
+          type: [ 'button', 'submit', 'reset' ]
         }
-      });
-    }
-    if (!Array.prototype.includes) {
-      Object.defineProperty(Array.prototype, 'includes', {
-        value: function value(searchElement) {
-          var O = Object(this);
-          var len = parseInt(O.length, 10) || 0;
-          if (len === 0) {
-            return false;
-          }
-          var n2 = parseInt(arguments[1], 10) || 0;
-          var k;
-          if (n2 >= 0) {
-            k = n2;
-          } else {
-            k = len + n2;
-            if (k < 0) {
-              k = 0;
-            }
-          }
-          var currentElement;
-          while (k < len) {
-            currentElement = O[k];
-            if (searchElement === currentElement || searchElement !== searchElement && currentElement !== currentElement) {
-              return true;
-            }
-            k++;
+      },
+      namingMethods: [ 'valueText', 'titleText', 'buttonDefaultText' ]
+    }, {
+      matches: {
+        nodeName: 'input',
+        properties: {
+          type: 'image'
+        }
+      },
+      namingMethods: [ 'altText', 'valueText', 'labelText', 'titleText', 'buttonDefaultText' ]
+    }, {
+      matches: 'button',
+      namingMethods: 'subtreeText'
+    }, {
+      matches: 'fieldset',
+      namingMethods: 'fieldsetLegendText'
+    }, {
+      matches: 'OUTPUT',
+      namingMethods: 'subtreeText'
+    }, {
+      matches: [ {
+        nodeName: 'select'
+      }, {
+        nodeName: 'input',
+        properties: {
+          type: /^(?!text|password|search|tel|email|url|button|submit|reset)/
+        }
+      } ],
+      namingMethods: 'labelText'
+    }, {
+      matches: 'summary',
+      namingMethods: 'subtreeText'
+    }, {
+      matches: 'figure',
+      namingMethods: [ 'figureText', 'titleText' ]
+    }, {
+      matches: 'img',
+      namingMethods: 'altText'
+    }, {
+      matches: 'table',
+      namingMethods: [ 'tableCaptionText', 'tableSummaryText' ]
+    }, {
+      matches: [ 'hr', 'br' ],
+      namingMethods: [ 'titleText', 'singleSpace' ]
+    } ];
+    var native_element_type_default = nativeElementType;
+    function visibleTextNodes(vNode) {
+      var parentVisible = _isVisibleOnScreen(vNode);
+      var nodes = [];
+      vNode.children.forEach(function(child) {
+        if (child.actualNode.nodeType === 3) {
+          if (parentVisible) {
+            nodes.push(child);
           }
-          return false;
+        } else {
+          nodes = nodes.concat(visibleTextNodes(child));
         }
       });
+      return nodes;
     }
-    if (!Array.prototype.some) {
-      Object.defineProperty(Array.prototype, 'some', {
-        value: function value(fun) {
-          if (this == null) {
-            throw new TypeError('Array.prototype.some called on null or undefined');
-          }
-          if (typeof fun !== 'function') {
-            throw new TypeError();
-          }
-          var t = Object(this);
-          var len = t.length >>> 0;
-          var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
-          for (var i = 0; i < len; i++) {
-            if (i in t && fun.call(thisArg, t[i], i, t)) {
-              return true;
-            }
-          }
-          return false;
+    var visible_text_nodes_default = visibleTextNodes;
+    var getVisibleChildTextRects = memoize_default(function getVisibleChildTextRectsMemoized(node) {
+      var vNode = get_node_from_tree_default(node);
+      var nodeRect = vNode.boundingClientRect;
+      var clientRects = [];
+      var overflowHiddenNodes = get_overflow_hidden_ancestors_default(vNode);
+      node.childNodes.forEach(function(textNode) {
+        if (textNode.nodeType !== 3 || sanitize_default(textNode.nodeValue) === '') {
+          return;
         }
+        var contentRects = getContentRects(textNode);
+        if (isOutsideNodeBounds(contentRects, nodeRect)) {
+          return;
+        }
+        clientRects.push.apply(clientRects, _toConsumableArray(filterHiddenRects(contentRects, overflowHiddenNodes)));
       });
+      return clientRects.length ? clientRects : filterHiddenRects([ nodeRect ], overflowHiddenNodes);
+    });
+    var get_visible_child_text_rects_default = getVisibleChildTextRects;
+    function getContentRects(node) {
+      var range2 = document.createRange();
+      range2.selectNodeContents(node);
+      return Array.from(range2.getClientRects());
     }
-    if (!Array.from) {
-      Object.defineProperty(Array, 'from', {
-        value: function() {
-          var toStr = Object.prototype.toString;
-          var isCallable = function isCallable(fn) {
-            return typeof fn === 'function' || toStr.call(fn) === '[object Function]';
-          };
-          var toInteger = function toInteger(value) {
-            var number = Number(value);
-            if (isNaN(number)) {
-              return 0;
-            }
-            if (number === 0 || !isFinite(number)) {
-              return number;
-            }
-            return (number > 0 ? 1 : -1) * Math.floor(Math.abs(number));
-          };
-          var maxSafeInteger = Math.pow(2, 53) - 1;
-          var toLength = function toLength(value) {
-            var len = toInteger(value);
-            return Math.min(Math.max(len, 0), maxSafeInteger);
-          };
-          return function from(arrayLike) {
-            var C = this;
-            var items = Object(arrayLike);
-            if (arrayLike == null) {
-              throw new TypeError('Array.from requires an array-like object - not null or undefined');
-            }
-            var mapFn = arguments.length > 1 ? arguments[1] : void 0;
-            var T;
-            if (typeof mapFn !== 'undefined') {
-              if (!isCallable(mapFn)) {
-                throw new TypeError('Array.from: when provided, the second argument must be a function');
-              }
-              if (arguments.length > 2) {
-                T = arguments[2];
-              }
-            }
-            var len = toLength(items.length);
-            var A = isCallable(C) ? Object(new C(len)) : new Array(len);
-            var k = 0;
-            var kValue;
-            while (k < len) {
-              kValue = items[k];
-              if (mapFn) {
-                A[k] = typeof T === 'undefined' ? mapFn(kValue, k) : mapFn.call(T, kValue, k);
-              } else {
-                A[k] = kValue;
-              }
-              k += 1;
-            }
-            A.length = len;
-            return A;
-          };
-        }()
+    function isOutsideNodeBounds(rects, nodeRect) {
+      return rects.some(function(rect) {
+        var centerPoint = _getRectCenter(rect);
+        return !_isPointInRect(centerPoint, nodeRect);
       });
     }
-    if (!String.prototype.includes) {
-      String.prototype.includes = function(search, start) {
-        if (typeof start !== 'number') {
-          start = 0;
-        }
-        if (start + search.length > this.length) {
-          return false;
-        } else {
-          return this.indexOf(search, start) !== -1;
+    function filterHiddenRects(contentRects, overflowHiddenNodes) {
+      var visibleRects = [];
+      contentRects.forEach(function(contentRect) {
+        if (contentRect.width < 1 || contentRect.height < 1) {
+          return;
         }
-      };
-    }
-    if (!Array.prototype.flat) {
-      Object.defineProperty(Array.prototype, 'flat', {
-        configurable: true,
-        value: function flat() {
-          var depth = isNaN(arguments[0]) ? 1 : Number(arguments[0]);
-          return depth ? Array.prototype.reduce.call(this, function(acc, cur) {
-            if (Array.isArray(cur)) {
-              acc.push.apply(acc, flat.call(cur, depth - 1));
-            } else {
-              acc.push(cur);
-            }
-            return acc;
-          }, []) : Array.prototype.slice.call(this);
-        },
-        writable: true
-      });
-    }
-    if (window.Node && !('isConnected' in window.Node.prototype)) {
-      Object.defineProperty(window.Node.prototype, 'isConnected', {
-        get: function get() {
-          return !this.ownerDocument || !(this.ownerDocument.compareDocumentPosition(this) & this.DOCUMENT_POSITION_DISCONNECTED);
+        var visibleRect = overflowHiddenNodes.reduce(function(rect, overflowNode) {
+          return rect && _getIntersectionRect(rect, overflowNode.boundingClientRect);
+        }, contentRect);
+        if (visibleRect) {
+          visibleRects.push(visibleRect);
         }
       });
+      return visibleRects;
     }
-    var import_css_selector_parser2 = __toModule(require_lib());
-    var import_dot = __toModule(require_doT());
-    var import_memoizee2 = __toModule(require_memoizee());
-    function multiplyMatrices(A, B) {
-      var m3 = A.length;
-      if (!Array.isArray(A[0])) {
-        A = [ A ];
-      }
-      if (!Array.isArray(B[0])) {
-        B = B.map(function(x) {
-          return [ x ];
-        });
+    function getTextElementStack(node) {
+      _createGrid();
+      var vNode = get_node_from_tree_default(node);
+      var grid = vNode._grid;
+      if (!grid) {
+        return [];
       }
-      var p2 = B[0].length;
-      var B_cols = B[0].map(function(_, i) {
-        return B.map(function(x) {
-          return x[i];
-        });
-      });
-      var product = A.map(function(row) {
-        return B_cols.map(function(col) {
-          var ret = 0;
-          if (!Array.isArray(row)) {
-            var _iterator6 = _createForOfIteratorHelper(col), _step6;
-            try {
-              for (_iterator6.s(); !(_step6 = _iterator6.n()).done; ) {
-                var c4 = _step6.value;
-                ret += row * c4;
-              }
-            } catch (err) {
-              _iterator6.e(err);
-            } finally {
-              _iterator6.f();
-            }
-            return ret;
-          }
-          for (var _i11 = 0; _i11 < row.length; _i11++) {
-            ret += row[_i11] * (col[_i11] || 0);
-          }
-          return ret;
-        });
+      var clientRects = get_visible_child_text_rects_default(node);
+      return clientRects.map(function(rect) {
+        return getRectStack(grid, rect);
       });
-      if (m3 === 1) {
-        product = product[0];
-      }
-      if (p2 === 1) {
-        return product.map(function(x) {
-          return x[0];
-        });
-      }
-      return product;
-    }
-    function isString(str) {
-      return type(str) === 'string';
-    }
-    function type(o) {
-      var str = Object.prototype.toString.call(o);
-      return (str.match(/^\[object\s+(.*?)\]$/)[1] || '').toLowerCase();
-    }
-    function serializeNumber(n2, _ref39) {
-      var precision = _ref39.precision, unit = _ref39.unit;
-      if (isNone(n2)) {
-        return 'none';
-      }
-      return toPrecision(n2, precision) + (unit !== null && unit !== void 0 ? unit : '');
-    }
-    function isNone(n2) {
-      return Number.isNaN(n2) || n2 instanceof Number && (n2 === null || n2 === void 0 ? void 0 : n2.none);
-    }
-    function skipNone(n2) {
-      return isNone(n2) ? 0 : n2;
-    }
-    function toPrecision(n2, precision) {
-      if (n2 === 0) {
-        return 0;
-      }
-      var integer = ~~n2;
-      var digits = 0;
-      if (integer && precision) {
-        digits = ~~Math.log10(Math.abs(integer)) + 1;
-      }
-      var multiplier = Math.pow(10, precision - digits);
-      return Math.floor(n2 * multiplier + .5) / multiplier;
-    }
-    var angleFactor = {
-      deg: 1,
-      grad: .9,
-      rad: 180 / Math.PI,
-      turn: 360
-    };
-    function parseFunction(str) {
-      if (!str) {
-        return;
-      }
-      str = str.trim();
-      var isFunctionRegex = /^([a-z]+)\((.+?)\)$/i;
-      var isNumberRegex = /^-?[\d.]+$/;
-      var unitValueRegex = /%|deg|g?rad|turn$/;
-      var singleArgument = /\/?\s*(none|[-\w.]+(?:%|deg|g?rad|turn)?)/g;
-      var parts = str.match(isFunctionRegex);
-      if (parts) {
-        var args = [];
-        parts[2].replace(singleArgument, function($0, rawArg) {
-          var match = rawArg.match(unitValueRegex);
-          var arg = rawArg;
-          if (match) {
-            var unit = match[0];
-            var unitlessArg = arg.slice(0, -unit.length);
-            if (unit === '%') {
-              arg = new Number(unitlessArg / 100);
-              arg.type = '';
-            } else {
-              arg = new Number(unitlessArg * angleFactor[unit]);
-              arg.type = '';
-              arg.unit = unit;
-            }
-          } else if (isNumberRegex.test(arg)) {
-            arg = new Number(arg);
-            arg.type = '';
-          } else if (arg === 'none') {
-            arg = new Number(NaN);
-            arg.none = true;
-          }
-          if ($0.startsWith('/')) {
-            arg = arg instanceof Number ? arg : new Number(arg);
-            arg.alpha = true;
-          }
-          if (_typeof(arg) === 'object' && arg instanceof Number) {
-            arg.raw = rawArg;
-          }
-          args.push(arg);
-        });
-        return {
-          name: parts[1].toLowerCase(),
-          rawName: parts[1],
-          rawArgs: parts[2],
-          args: args
-        };
-      }
     }
-    function last(arr) {
-      return arr[arr.length - 1];
-    }
-    function interpolate(start, end, p2) {
-      if (isNaN(start)) {
-        return end;
+    var get_text_element_stack_default = getTextElementStack;
+    var visualRoles = [ 'checkbox', 'img', 'meter', 'progressbar', 'scrollbar', 'radio', 'slider', 'spinbutton', 'textbox' ];
+    function isVisualContent(el) {
+      var _nodeLookup19 = _nodeLookup(el), vNode = _nodeLookup19.vNode;
+      var role = axe.commons.aria.getExplicitRole(vNode);
+      if (role) {
+        return visualRoles.indexOf(role) !== -1;
       }
-      if (isNaN(end)) {
-        return start;
+      switch (vNode.props.nodeName) {
+       case 'img':
+       case 'iframe':
+       case 'object':
+       case 'video':
+       case 'audio':
+       case 'canvas':
+       case 'svg':
+       case 'math':
+       case 'button':
+       case 'select':
+       case 'textarea':
+       case 'keygen':
+       case 'progress':
+       case 'meter':
+        return true;
+
+       case 'input':
+        return vNode.props.type !== 'hidden';
+
+       default:
+        return false;
       }
-      return start + (end - start) * p2;
-    }
-    function interpolateInv(start, end, value) {
-      return (value - start) / (end - start);
-    }
-    function mapRange(from, to2, value) {
-      return interpolate(to2[0], to2[1], interpolateInv(from[0], from[1], value));
     }
-    function parseCoordGrammar(coordGrammars) {
-      return coordGrammars.map(function(coordGrammar2) {
-        return coordGrammar2.split('|').map(function(type2) {
-          type2 = type2.trim();
-          var range2 = type2.match(/^(<[a-z]+>)\[(-?[.\d]+),\s*(-?[.\d]+)\]?$/);
-          if (range2) {
-            var ret = new String(range2[1]);
-            ret.range = [ +range2[2], +range2[3] ];
-            return ret;
-          }
-          return type2;
-        });
+    var is_visual_content_default = isVisualContent;
+    var hiddenTextElms = [ 'head', 'title', 'template', 'script', 'style', 'iframe', 'object', 'video', 'audio', 'noscript' ];
+    function hasChildTextNodes(elm) {
+      if (hiddenTextElms.includes(elm.props.nodeName)) {
+        return false;
+      }
+      return elm.children.some(function(_ref38) {
+        var props = _ref38.props;
+        return props.nodeType === 3 && props.nodeValue.trim();
       });
     }
-    function clamp(min, val, max2) {
-      return Math.max(Math.min(max2, val), min);
-    }
-    function copySign(to2, from) {
-      return Math.sign(to2) === Math.sign(from) ? to2 : -to2;
-    }
-    function spow(base, exp) {
-      return copySign(Math.pow(Math.abs(base), exp), base);
+    function hasContentVirtual(elm, noRecursion, ignoreAria) {
+      return hasChildTextNodes(elm) || is_visual_content_default(elm.actualNode) || !ignoreAria && !!label_virtual_default(elm) || !noRecursion && elm.children.some(function(child) {
+        return child.actualNode.nodeType === 1 && hasContentVirtual(child);
+      });
     }
-    function zdiv(n2, d2) {
-      return d2 === 0 ? 0 : n2 / d2;
+    var has_content_virtual_default = hasContentVirtual;
+    function hasContent(elm, noRecursion, ignoreAria) {
+      elm = get_node_from_tree_default(elm);
+      return has_content_virtual_default(elm, noRecursion, ignoreAria);
     }
-    function bisectLeft(arr, value) {
-      var lo = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
-      var hi = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : arr.length;
-      while (lo < hi) {
-        var mid = lo + hi >> 1;
-        if (arr[mid] < value) {
-          lo = mid + 1;
-        } else {
-          hi = mid;
-        }
+    var has_content_default = hasContent;
+    function _hasLangText(virtualNode) {
+      if (typeof virtualNode.children === 'undefined' || hasChildTextNodes(virtualNode)) {
+        return true;
       }
-      return lo;
-    }
-    var util = Object.freeze({
-      __proto__: null,
-      bisectLeft: bisectLeft,
-      clamp: clamp,
-      copySign: copySign,
-      interpolate: interpolate,
-      interpolateInv: interpolateInv,
-      isNone: isNone,
-      isString: isString,
-      last: last,
-      mapRange: mapRange,
-      multiplyMatrices: multiplyMatrices,
-      parseCoordGrammar: parseCoordGrammar,
-      parseFunction: parseFunction,
-      serializeNumber: serializeNumber,
-      skipNone: skipNone,
-      spow: spow,
-      toPrecision: toPrecision,
-      type: type,
-      zdiv: zdiv
-    });
-    var Hooks = function() {
-      function Hooks() {
-        _classCallCheck(this, Hooks);
+      if (virtualNode.props.nodeType === 1 && is_visual_content_default(virtualNode)) {
+        return !!axe.commons.text.accessibleTextVirtual(virtualNode);
       }
-      _createClass(Hooks, [ {
-        key: 'add',
-        value: function add(name, callback, first) {
-          if (typeof arguments[0] != 'string') {
-            for (var name in arguments[0]) {
-              this.add(name, arguments[0][name], arguments[1]);
-            }
-            return;
-          }
-          (Array.isArray(name) ? name : [ name ]).forEach(function(name2) {
-            this[name2] = this[name2] || [];
-            if (callback) {
-              this[name2][first ? 'unshift' : 'push'](callback);
-            }
-          }, this);
-        }
-      }, {
-        key: 'run',
-        value: function run(name, env) {
-          this[name] = this[name] || [];
-          this[name].forEach(function(callback) {
-            callback.call(env && env.context ? env.context : env, env);
-          });
-        }
-      } ]);
-      return Hooks;
-    }();
-    var hooks = new Hooks();
-    var defaults = {
-      gamut_mapping: 'css',
-      precision: 5,
-      deltaE: '76',
-      verbose: (globalThis === null || globalThis === void 0 || (_globalThis$process = globalThis.process) === null || _globalThis$process === void 0 || (_globalThis$process = _globalThis$process.env) === null || _globalThis$process === void 0 || (_globalThis$process = _globalThis$process.NODE_ENV) === null || _globalThis$process === void 0 ? void 0 : _globalThis$process.toLowerCase()) !== 'test',
-      warn: function warn(msg) {
-        if (this.verbose) {
-          var _globalThis$console, _globalThis$console$w;
-          globalThis === null || globalThis === void 0 || (_globalThis$console = globalThis.console) === null || _globalThis$console === void 0 || (_globalThis$console$w = _globalThis$console.warn) === null || _globalThis$console$w === void 0 || _globalThis$console$w.call(_globalThis$console, msg);
-        }
+      return virtualNode.children.some(function(child) {
+        return !child.attr('lang') && _hasLangText(child) && !_isHiddenForEveryone(child);
+      });
+    }
+    function insertedIntoFocusOrder(el) {
+      var tabIndex = parseInt(el.getAttribute('tabindex'), 10);
+      return tabIndex > -1 && _isFocusable(el) && !is_natively_focusable_default(el);
+    }
+    var inserted_into_focus_order_default = insertedIntoFocusOrder;
+    function isHiddenWithCSS(el, descendentVisibilityValue) {
+      var _nodeLookup20 = _nodeLookup(el), vNode = _nodeLookup20.vNode, domNode = _nodeLookup20.domNode;
+      if (!vNode) {
+        return _isHiddenWithCSS(domNode, descendentVisibilityValue);
       }
-    };
-    var WHITES = {
-      D50: [ .3457 / .3585, 1, (1 - .3457 - .3585) / .3585 ],
-      D65: [ .3127 / .329, 1, (1 - .3127 - .329) / .329 ]
-    };
-    function getWhite(name) {
-      if (Array.isArray(name)) {
-        return name;
+      if (vNode._isHiddenWithCSS === void 0) {
+        vNode._isHiddenWithCSS = _isHiddenWithCSS(domNode, descendentVisibilityValue);
       }
-      return WHITES[name];
+      return vNode._isHiddenWithCSS;
     }
-    function adapt$2(W1, W2, XYZ) {
-      var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
-      W1 = getWhite(W1);
-      W2 = getWhite(W2);
-      if (!W1 || !W2) {
-        throw new TypeError('Missing white point to convert '.concat(!W1 ? 'from' : '').concat(!W1 && !W2 ? '/' : '').concat(!W2 ? 'to' : ''));
+    function _isHiddenWithCSS(el, descendentVisibilityValue) {
+      if (el.nodeType === 9) {
+        return false;
       }
-      if (W1 === W2) {
-        return XYZ;
+      if (el.nodeType === 11) {
+        el = el.host;
       }
-      var env = {
-        W1: W1,
-        W2: W2,
-        XYZ: XYZ,
-        options: options
-      };
-      hooks.run('chromatic-adaptation-start', env);
-      if (!env.M) {
-        if (env.W1 === WHITES.D65 && env.W2 === WHITES.D50) {
-          env.M = [ [ 1.0479297925449969, .022946870601609652, -.05019226628920524 ], [ .02962780877005599, .9904344267538799, -.017073799063418826 ], [ -.009243040646204504, .015055191490298152, .7518742814281371 ] ];
-        } else if (env.W1 === WHITES.D50 && env.W2 === WHITES.D65) {
-          env.M = [ [ .955473421488075, -.02309845494876471, .06325924320057072 ], [ -.0283697093338637, 1.0099953980813041, .021041441191917323 ], [ .012314014864481998, -.020507649298898964, 1.330365926242124 ] ];
-        }
+      if ([ 'STYLE', 'SCRIPT' ].includes(el.nodeName.toUpperCase())) {
+        return false;
       }
-      hooks.run('chromatic-adaptation-end', env);
-      if (env.M) {
-        return multiplyMatrices(env.M, env.XYZ);
-      } else {
-        throw new TypeError('Only Bradford CAT with white points D50 and D65 supported for now.');
+      var style = window.getComputedStyle(el, null);
+      if (!style) {
+        throw new Error('Style does not exist for the given element.');
       }
-    }
-    var noneTypes = new Set([ '', '', '' ]);
-    function coerceCoords(space, format, name, coords) {
-      var types = Object.entries(space.coords).map(function(_ref40, i) {
-        var _ref41 = _slicedToArray(_ref40, 2), id = _ref41[0], coordMeta = _ref41[1];
-        var coordGrammar2 = format.coordGrammar[i];
-        var arg = coords[i];
-        var providedType = arg === null || arg === void 0 ? void 0 : arg.type;
-        var type2;
-        if (arg.none) {
-          type2 = coordGrammar2.find(function(c4) {
-            return noneTypes.has(c4);
-          });
-        } else {
-          type2 = coordGrammar2.find(function(c4) {
-            return c4 == providedType;
-          });
-        }
-        if (!type2) {
-          var coordName = coordMeta.name || id;
-          throw new TypeError(''.concat(providedType !== null && providedType !== void 0 ? providedType : arg.raw, ' not allowed for ').concat(coordName, ' in ').concat(name, '()'));
-        }
-        var fromRange = type2.range;
-        if (providedType === '') {
-          fromRange || (fromRange = [ 0, 1 ]);
-        }
-        var toRange = coordMeta.range || coordMeta.refRange;
-        if (fromRange && toRange) {
-          coords[i] = mapRange(fromRange, toRange, coords[i]);
-        }
-        return type2;
-      });
-      return types;
-    }
-    function parse2(str) {
-      var _String;
-      var _ref42 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, meta = _ref42.meta;
-      var env = {
-        str: (_String = String(str)) === null || _String === void 0 ? void 0 : _String.trim()
-      };
-      hooks.run('parse-start', env);
-      if (env.color) {
-        return env.color;
+      var displayValue = style.getPropertyValue('display');
+      if (displayValue === 'none') {
+        return true;
       }
-      env.parsed = parseFunction(env.str);
-      if (env.parsed) {
-        var name = env.parsed.name;
-        if (name === 'color') {
-          var id = env.parsed.args.shift();
-          var alternateId = id.startsWith('--') ? id.substring(2) : '--'.concat(id);
-          var ids = [ id, alternateId ];
-          var alpha = env.parsed.rawArgs.indexOf('/') > 0 ? env.parsed.args.pop() : 1;
-          var _iterator7 = _createForOfIteratorHelper(ColorSpace.all), _step7;
-          try {
-            for (_iterator7.s(); !(_step7 = _iterator7.n()).done; ) {
-              var space = _step7.value;
-              var colorSpec = space.getFormat('color');
-              if (colorSpec) {
-                var _colorSpec$ids;
-                if (ids.includes(colorSpec.id) || (_colorSpec$ids = colorSpec.ids) !== null && _colorSpec$ids !== void 0 && _colorSpec$ids.filter(function(specId) {
-                  return ids.includes(specId);
-                }).length) {
-                  var coords = Object.keys(space.coords).map(function(_, i) {
-                    return env.parsed.args[i] || 0;
-                  });
-                  var types = void 0;
-                  if (colorSpec.coordGrammar) {
-                    types = coerceCoords(space, colorSpec, 'color', coords);
-                  }
-                  if (meta) {
-                    Object.assign(meta, {
-                      formatId: 'color',
-                      types: types
-                    });
-                  }
-                  if (colorSpec.id.startsWith('--') && !id.startsWith('--')) {
-                    defaults.warn(''.concat(space.name, ' is a non-standard space and not currently supported in the CSS spec. Use prefixed color(').concat(colorSpec.id, ') instead of color(').concat(id, ').'));
-                  }
-                  if (id.startsWith('--') && !colorSpec.id.startsWith('--')) {
-                    defaults.warn(''.concat(space.name, ' is a standard space and supported in the CSS spec. Use color(').concat(colorSpec.id, ') instead of prefixed color(').concat(id, ').'));
-                  }
-                  return {
-                    spaceId: space.id,
-                    coords: coords,
-                    alpha: alpha
-                  };
-                }
-              }
-            }
-          } catch (err) {
-            _iterator7.e(err);
-          } finally {
-            _iterator7.f();
-          }
-          var didYouMean = '';
-          var registryId = id in ColorSpace.registry ? id : alternateId;
-          if (registryId in ColorSpace.registry) {
-            var _ColorSpace$registry$;
-            var cssId = (_ColorSpace$registry$ = ColorSpace.registry[registryId].formats) === null || _ColorSpace$registry$ === void 0 || (_ColorSpace$registry$ = _ColorSpace$registry$.color) === null || _ColorSpace$registry$ === void 0 ? void 0 : _ColorSpace$registry$.id;
-            if (cssId) {
-              didYouMean = 'Did you mean color('.concat(cssId, ')?');
-            }
-          }
-          throw new TypeError('Cannot parse color('.concat(id, '). ') + (didYouMean || 'Missing a plugin?'));
-        } else {
-          var _iterator8 = _createForOfIteratorHelper(ColorSpace.all), _step8;
-          try {
-            for (_iterator8.s(); !(_step8 = _iterator8.n()).done; ) {
-              var _space = _step8.value;
-              var format = _space.getFormat(name);
-              if (format && format.type === 'function') {
-                var _alpha = 1;
-                if (format.lastAlpha || last(env.parsed.args).alpha) {
-                  _alpha = env.parsed.args.pop();
-                }
-                var _coords = env.parsed.args;
-                var _types = void 0;
-                if (format.coordGrammar) {
-                  _types = coerceCoords(_space, format, name, _coords);
-                }
-                if (meta) {
-                  Object.assign(meta, {
-                    formatId: format.name,
-                    types: _types
-                  });
-                }
-                return {
-                  spaceId: _space.id,
-                  coords: _coords,
-                  alpha: _alpha
-                };
-              }
-            }
-          } catch (err) {
-            _iterator8.e(err);
-          } finally {
-            _iterator8.f();
-          }
-        }
-      } else {
-        var _iterator9 = _createForOfIteratorHelper(ColorSpace.all), _step9;
-        try {
-          for (_iterator9.s(); !(_step9 = _iterator9.n()).done; ) {
-            var _space2 = _step9.value;
-            for (var formatId in _space2.formats) {
-              var _format = _space2.formats[formatId];
-              if (_format.type !== 'custom') {
-                continue;
-              }
-              if (_format.test && !_format.test(env.str)) {
-                continue;
-              }
-              var color = _format.parse(env.str);
-              if (color) {
-                var _color$alpha;
-                (_color$alpha = color.alpha) !== null && _color$alpha !== void 0 ? _color$alpha : color.alpha = 1;
-                if (meta) {
-                  meta.formatId = formatId;
-                }
-                return color;
-              }
-            }
-          }
-        } catch (err) {
-          _iterator9.e(err);
-        } finally {
-          _iterator9.f();
-        }
+      var HIDDEN_VISIBILITY_VALUES = [ 'hidden', 'collapse' ];
+      var visibilityValue = style.getPropertyValue('visibility');
+      if (HIDDEN_VISIBILITY_VALUES.includes(visibilityValue) && !descendentVisibilityValue) {
+        return true;
       }
-      throw new TypeError('Could not parse '.concat(str, ' as a color. Missing a plugin?'));
-    }
-    function getColor(color) {
-      if (Array.isArray(color)) {
-        return color.map(getColor);
+      if (HIDDEN_VISIBILITY_VALUES.includes(visibilityValue) && descendentVisibilityValue && HIDDEN_VISIBILITY_VALUES.includes(descendentVisibilityValue)) {
+        return true;
+      }
+      var parent = get_composed_parent_default(el);
+      if (parent && !HIDDEN_VISIBILITY_VALUES.includes(visibilityValue)) {
+        return isHiddenWithCSS(parent, visibilityValue);
       }
-      if (!color) {
-        throw new TypeError('Empty color reference');
+      return false;
+    }
+    var is_hidden_with_css_default = isHiddenWithCSS;
+    function isHTML5(doc) {
+      var node = doc.doctype;
+      if (node === null) {
+        return false;
       }
-      if (isString(color)) {
-        color = parse2(color);
+      return node.name === 'html' && !node.publicId && !node.systemId;
+    }
+    var is_html5_default = isHTML5;
+    function getRoleType(role) {
+      var _window3;
+      if (role instanceof abstract_virtual_node_default || (_window3 = window) !== null && _window3 !== void 0 && _window3.Node && role instanceof window.Node) {
+        role = axe.commons.aria.getRole(role);
       }
-      var space = color.space || color.spaceId;
-      if (!(space instanceof ColorSpace)) {
-        color.space = ColorSpace.get(space);
+      var roleDef = standards_default.ariaRoles[role];
+      return (roleDef === null || roleDef === void 0 ? void 0 : roleDef.type) || null;
+    }
+    var get_role_type_default = getRoleType;
+    function walkDomNode(node, functor) {
+      if (functor(node.actualNode) !== false) {
+        node.children.forEach(function(child) {
+          return walkDomNode(child, functor);
+        });
       }
-      if (color.alpha === void 0) {
-        color.alpha = 1;
+    }
+    var blockLike = [ 'block', 'list-item', 'table', 'flex', 'grid', 'inline-block' ];
+    function isBlock(elm) {
+      var display2 = window.getComputedStyle(elm).getPropertyValue('display');
+      return blockLike.includes(display2) || display2.substr(0, 6) === 'table-';
+    }
+    function getBlockParent(node) {
+      var parentBlock = get_composed_parent_default(node);
+      while (parentBlock && !isBlock(parentBlock)) {
+        parentBlock = get_composed_parent_default(parentBlock);
       }
-      return color;
+      return get_node_from_tree_default(parentBlock);
     }
-    var \u03b5$7 = 75e-6;
-    var _ColorSpace = function() {
-      function _ColorSpace(options) {
-        var _options$coords, _ref43, _options$white, _options$formats, _this$formats$color;
-        _classCallCheck(this, _ColorSpace);
-        this.id = options.id;
-        this.name = options.name;
-        this.base = options.base ? _ColorSpace.get(options.base) : null;
-        this.aliases = options.aliases;
-        if (this.base) {
-          this.fromBase = options.fromBase;
-          this.toBase = options.toBase;
+    function isInTextBlock(node, options) {
+      if (isBlock(node)) {
+        return false;
+      }
+      var virtualParent = getBlockParent(node);
+      var parentText = '';
+      var widgetText = '';
+      var inBrBlock = 0;
+      walkDomNode(virtualParent, function(currNode) {
+        if (inBrBlock === 2) {
+          return false;
         }
-        var coords = (_options$coords = options.coords) !== null && _options$coords !== void 0 ? _options$coords : this.base.coords;
-        for (var name in coords) {
-          if (!('name' in coords[name])) {
-            coords[name].name = name;
-          }
+        if (currNode.nodeType === 3) {
+          parentText += currNode.nodeValue;
         }
-        this.coords = coords;
-        var white2 = (_ref43 = (_options$white = options.white) !== null && _options$white !== void 0 ? _options$white : this.base.white) !== null && _ref43 !== void 0 ? _ref43 : 'D65';
-        this.white = getWhite(white2);
-        this.formats = (_options$formats = options.formats) !== null && _options$formats !== void 0 ? _options$formats : {};
-        for (var _name in this.formats) {
-          var format = this.formats[_name];
-          format.type || (format.type = 'function');
-          format.name || (format.name = _name);
+        if (currNode.nodeType !== 1) {
+          return;
         }
-        if (!((_this$formats$color = this.formats.color) !== null && _this$formats$color !== void 0 && _this$formats$color.id)) {
-          var _this$formats$color2;
-          this.formats.color = _extends({}, (_this$formats$color2 = this.formats.color) !== null && _this$formats$color2 !== void 0 ? _this$formats$color2 : {}, {
-            id: options.cssId || this.id
-          });
+        var nodeName2 = (currNode.nodeName || '').toUpperCase();
+        if (currNode === node) {
+          inBrBlock = 1;
         }
-        if (options.gamutSpace) {
-          this.gamutSpace = options.gamutSpace === 'self' ? this : _ColorSpace.get(options.gamutSpace);
-        } else {
-          if (this.isPolar) {
-            this.gamutSpace = this.base;
+        if ([ 'BR', 'HR' ].includes(nodeName2)) {
+          if (inBrBlock === 0) {
+            parentText = '';
+            widgetText = '';
           } else {
-            this.gamutSpace = this;
+            inBrBlock = 2;
           }
+        } else if (currNode.style.display === 'none' || currNode.style.overflow === 'hidden' || ![ '', null, 'none' ].includes(currNode.style['float']) || ![ '', null, 'relative' ].includes(currNode.style.position)) {
+          return false;
+        } else if (get_role_type_default(currNode) === 'widget') {
+          widgetText += currNode.textContent;
+          return false;
         }
-        if (this.gamutSpace.isUnbounded) {
-          this.inGamut = function(coords2, options2) {
-            return true;
+      });
+      parentText = sanitize_default(parentText);
+      if (options !== null && options !== void 0 && options.noLengthCompare) {
+        return parentText.length !== 0;
+      }
+      widgetText = sanitize_default(widgetText);
+      return parentText.length > widgetText.length;
+    }
+    var is_in_text_block_default = isInTextBlock;
+    function isModalOpen(options) {
+      options = options || {};
+      var modalPercent = options.modalPercent || .75;
+      if (cache_default.get('isModalOpen')) {
+        return cache_default.get('isModalOpen');
+      }
+      var definiteModals = query_selector_all_filter_default(axe._tree[0], 'dialog, [role=dialog], [aria-modal=true]', _isVisibleOnScreen);
+      if (definiteModals.length) {
+        cache_default.set('isModalOpen', true);
+        return true;
+      }
+      var viewport = get_viewport_size_default(window);
+      var percentWidth = viewport.width * modalPercent;
+      var percentHeight = viewport.height * modalPercent;
+      var x = (viewport.width - percentWidth) / 2;
+      var y = (viewport.height - percentHeight) / 2;
+      var points = [ {
+        x: x,
+        y: y
+      }, {
+        x: viewport.width - x,
+        y: y
+      }, {
+        x: viewport.width / 2,
+        y: viewport.height / 2
+      }, {
+        x: x,
+        y: viewport.height - y
+      }, {
+        x: viewport.width - x,
+        y: viewport.height - y
+      } ];
+      var stacks = points.map(function(point) {
+        return Array.from(document.elementsFromPoint(point.x, point.y));
+      });
+      var _loop4 = function _loop4() {
+        var modalElement = stacks[_i10].find(function(elm) {
+          var style = window.getComputedStyle(elm);
+          return parseInt(style.width, 10) >= percentWidth && parseInt(style.height, 10) >= percentHeight && style.getPropertyValue('pointer-events') !== 'none' && (style.position === 'absolute' || style.position === 'fixed');
+        });
+        if (modalElement && stacks.every(function(stack) {
+          return stack.includes(modalElement);
+        })) {
+          cache_default.set('isModalOpen', true);
+          return {
+            v: true
           };
         }
-        this.referred = options.referred;
-        Object.defineProperty(this, 'path', {
-          value: getPath(this).reverse(),
-          writable: false,
-          enumerable: true,
-          configurable: true
+      }, _ret;
+      for (var _i10 = 0; _i10 < stacks.length; _i10++) {
+        _ret = _loop4();
+        if (_ret) {
+          return _ret.v;
+        }
+      }
+      cache_default.set('isModalOpen', void 0);
+      return void 0;
+    }
+    var is_modal_open_default = isModalOpen;
+    function _isMultiline(domNode) {
+      var margin = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 2;
+      var range2 = domNode.ownerDocument.createRange();
+      range2.setStart(domNode, 0);
+      range2.setEnd(domNode, domNode.childNodes.length);
+      var lastLineEnd = 0;
+      var lineCount = 0;
+      var _iterator5 = _createForOfIteratorHelper(range2.getClientRects()), _step5;
+      try {
+        for (_iterator5.s(); !(_step5 = _iterator5.n()).done; ) {
+          var rect = _step5.value;
+          if (rect.height <= margin) {
+            continue;
+          }
+          if (lastLineEnd > rect.top + margin) {
+            lastLineEnd = Math.max(lastLineEnd, rect.bottom);
+          } else if (lineCount === 0) {
+            lastLineEnd = rect.bottom;
+            lineCount++;
+          } else {
+            return true;
+          }
+        }
+      } catch (err) {
+        _iterator5.e(err);
+      } finally {
+        _iterator5.f();
+      }
+      return false;
+    }
+    function isNode(element) {
+      return element instanceof window.Node;
+    }
+    var is_node_default = isNode;
+    var cacheKey = 'color.incompleteData';
+    var incompleteData = {
+      set: function set(key, reason) {
+        if (typeof key !== 'string') {
+          throw new Error('Incomplete data: key must be a string');
+        }
+        var data = cache_default.get(cacheKey, function() {
+          return {};
+        });
+        if (reason) {
+          data[key] = reason;
+        }
+        return data[key];
+      },
+      get: function get(key) {
+        var data = cache_default.get(cacheKey);
+        return data === null || data === void 0 ? void 0 : data[key];
+      },
+      clear: function clear() {
+        cache_default.set(cacheKey, {});
+      }
+    };
+    var incomplete_data_default = incompleteData;
+    function elementHasImage(elm, style) {
+      var graphicNodes = [ 'IMG', 'CANVAS', 'OBJECT', 'IFRAME', 'VIDEO', 'SVG' ];
+      var nodeName2 = elm.nodeName.toUpperCase();
+      if (graphicNodes.includes(nodeName2)) {
+        incomplete_data_default.set('bgColor', 'imgNode');
+        return true;
+      }
+      style = style || window.getComputedStyle(elm);
+      var bgImageStyle = style.getPropertyValue('background-image');
+      var hasBgImage = bgImageStyle !== 'none';
+      if (hasBgImage) {
+        var hasGradient = /gradient/.test(bgImageStyle);
+        incomplete_data_default.set('bgColor', hasGradient ? 'bgGradient' : 'bgImage');
+      }
+      return hasBgImage;
+    }
+    var element_has_image_default = elementHasImage;
+    var imports_exports = {};
+    __export(imports_exports, {
+      ArrayFrom: function ArrayFrom() {
+        return import_from2['default'];
+      },
+      Colorjs: function Colorjs() {
+        return Color;
+      },
+      CssSelectorParser: function CssSelectorParser() {
+        return import_css_selector_parser2.CssSelectorParser;
+      },
+      doT: function doT() {
+        return import_dot['default'];
+      },
+      emojiRegexText: function emojiRegexText() {
+        return emoji_regex_default;
+      },
+      memoize: function memoize() {
+        return import_memoizee2['default'];
+      }
+    });
+    var import_es6_promise = __toModule(require_es6_promise());
+    var import_typedarray = __toModule(require_typedarray());
+    var import_weakmap_polyfill = __toModule(require_weakmap_polyfill());
+    var import_has_own = __toModule(require_has_own3());
+    var import_values = __toModule(require_values3());
+    var import_from = __toModule(require_from4());
+    if (!('hasOwn' in Object)) {
+      Object.hasOwn = import_has_own['default'];
+    }
+    if (!('values' in Object)) {
+      Object.values = import_values['default'];
+    }
+    if (!('Promise' in window)) {
+      import_es6_promise['default'].polyfill();
+    }
+    if (!('Uint32Array' in window)) {
+      window.Uint32Array = import_typedarray.Uint32Array;
+    }
+    if (window.Uint32Array) {
+      if (!('some' in window.Uint32Array.prototype)) {
+        Object.defineProperty(window.Uint32Array.prototype, 'some', {
+          value: Array.prototype.some
         });
-        hooks.run('colorspace-init-end', this);
       }
-      _createClass(_ColorSpace, [ {
-        key: 'inGamut',
-        value: function inGamut(coords) {
-          var _ref44 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref44$epsilon = _ref44.epsilon, epsilon = _ref44$epsilon === void 0 ? \u03b5$7 : _ref44$epsilon;
-          if (!this.equals(this.gamutSpace)) {
-            coords = this.to(this.gamutSpace, coords);
-            return this.gamutSpace.inGamut(coords, {
-              epsilon: epsilon
-            });
+      if (!('reduce' in window.Uint32Array.prototype)) {
+        Object.defineProperty(window.Uint32Array.prototype, 'reduce', {
+          value: Array.prototype.reduce
+        });
+      }
+    }
+    if (typeof Object.assign !== 'function') {
+      (function() {
+        Object.assign = function(target) {
+          if (target === void 0 || target === null) {
+            throw new TypeError('Cannot convert undefined or null to object');
           }
-          var coordMeta = Object.values(this.coords);
-          return coords.every(function(c4, i) {
-            var meta = coordMeta[i];
-            if (meta.type !== 'angle' && meta.range) {
-              if (Number.isNaN(c4)) {
-                return true;
+          var output = Object(target);
+          for (var index = 1; index < arguments.length; index++) {
+            var source = arguments[index];
+            if (source !== void 0 && source !== null) {
+              for (var nextKey in source) {
+                if (source.hasOwnProperty(nextKey)) {
+                  output[nextKey] = source[nextKey];
+                }
               }
-              var _meta$range = _slicedToArray(meta.range, 2), min = _meta$range[0], max2 = _meta$range[1];
-              return (min === void 0 || c4 >= min - epsilon) && (max2 === void 0 || c4 <= max2 + epsilon);
             }
-            return true;
-          });
-        }
-      }, {
-        key: 'isUnbounded',
-        get: function get() {
-          return Object.values(this.coords).every(function(coord) {
-            return !('range' in coord);
-          });
-        }
-      }, {
-        key: 'cssId',
-        get: function get() {
-          var _this$formats;
-          return ((_this$formats = this.formats) === null || _this$formats === void 0 || (_this$formats = _this$formats.color) === null || _this$formats === void 0 ? void 0 : _this$formats.id) || this.id;
-        }
-      }, {
-        key: 'isPolar',
-        get: function get() {
-          for (var id in this.coords) {
-            if (this.coords[id].type === 'angle') {
-              return true;
+          }
+          return output;
+        };
+      })();
+    }
+    if (!Array.prototype.find) {
+      Object.defineProperty(Array.prototype, 'find', {
+        value: function value(predicate) {
+          if (this === null) {
+            throw new TypeError('Array.prototype.find called on null or undefined');
+          }
+          if (typeof predicate !== 'function') {
+            throw new TypeError('predicate must be a function');
+          }
+          var list = Object(this);
+          var length = list.length >>> 0;
+          var thisArg = arguments[1];
+          var value;
+          for (var i = 0; i < length; i++) {
+            value = list[i];
+            if (predicate.call(thisArg, value, i, list)) {
+              return value;
             }
           }
-          return false;
+          return void 0;
         }
-      }, {
-        key: 'getFormat',
-        value: function getFormat(format) {
-          if (_typeof(format) === 'object') {
-            format = processFormat(format, this);
-            return format;
+      });
+    }
+    if (!Array.prototype.findIndex) {
+      Object.defineProperty(Array.prototype, 'findIndex', {
+        value: function value(predicate, thisArg) {
+          if (this === null) {
+            throw new TypeError('Array.prototype.find called on null or undefined');
           }
-          var ret;
-          if (format === 'default') {
-            ret = Object.values(this.formats)[0];
-          } else {
-            ret = this.formats[format];
+          if (typeof predicate !== 'function') {
+            throw new TypeError('predicate must be a function');
           }
-          if (ret) {
-            ret = processFormat(ret, this);
-            return ret;
+          var list = Object(this);
+          var length = list.length >>> 0;
+          var value;
+          for (var i = 0; i < length; i++) {
+            value = list[i];
+            if (predicate.call(thisArg, value, i, list)) {
+              return i;
+            }
           }
-          return null;
+          return -1;
         }
-      }, {
-        key: 'equals',
-        value: function equals(space) {
-          if (!space) {
+      });
+    }
+    if (!Array.prototype.includes) {
+      Object.defineProperty(Array.prototype, 'includes', {
+        value: function value(searchElement) {
+          var O = Object(this);
+          var len = parseInt(O.length, 10) || 0;
+          if (len === 0) {
             return false;
           }
-          return this === space || this.id === space || this.id === space.id;
-        }
-      }, {
-        key: 'to',
-        value: function to(space, coords) {
-          if (arguments.length === 1) {
-            var color = getColor(space);
-            var _ref45 = [ color.space, color.coords ];
-            space = _ref45[0];
-            coords = _ref45[1];
-          }
-          space = _ColorSpace.get(space);
-          if (this.equals(space)) {
-            return coords;
-          }
-          coords = coords.map(function(c4) {
-            return Number.isNaN(c4) ? 0 : c4;
-          });
-          var myPath = this.path;
-          var otherPath = space.path;
-          var connectionSpace, connectionSpaceIndex;
-          for (var _i12 = 0; _i12 < myPath.length; _i12++) {
-            if (myPath[_i12].equals(otherPath[_i12])) {
-              connectionSpace = myPath[_i12];
-              connectionSpaceIndex = _i12;
-            } else {
-              break;
+          var n2 = parseInt(arguments[1], 10) || 0;
+          var k;
+          if (n2 >= 0) {
+            k = n2;
+          } else {
+            k = len + n2;
+            if (k < 0) {
+              k = 0;
             }
           }
-          if (!connectionSpace) {
-            throw new Error('Cannot convert between color spaces '.concat(this, ' and ').concat(space, ': no connection space was found'));
+          var currentElement;
+          while (k < len) {
+            currentElement = O[k];
+            if (searchElement === currentElement || searchElement !== searchElement && currentElement !== currentElement) {
+              return true;
+            }
+            k++;
           }
-          for (var _i13 = myPath.length - 1; _i13 > connectionSpaceIndex; _i13--) {
-            coords = myPath[_i13].toBase(coords);
+          return false;
+        }
+      });
+    }
+    if (!Array.prototype.some) {
+      Object.defineProperty(Array.prototype, 'some', {
+        value: function value(fun) {
+          if (this == null) {
+            throw new TypeError('Array.prototype.some called on null or undefined');
           }
-          for (var _i14 = connectionSpaceIndex + 1; _i14 < otherPath.length; _i14++) {
-            coords = otherPath[_i14].fromBase(coords);
+          if (typeof fun !== 'function') {
+            throw new TypeError();
           }
-          return coords;
-        }
-      }, {
-        key: 'from',
-        value: function from(space, coords) {
-          if (arguments.length === 1) {
-            var color = getColor(space);
-            var _ref46 = [ color.space, color.coords ];
-            space = _ref46[0];
-            coords = _ref46[1];
+          var t = Object(this);
+          var len = t.length >>> 0;
+          var thisArg = arguments.length >= 2 ? arguments[1] : void 0;
+          for (var i = 0; i < len; i++) {
+            if (i in t && fun.call(thisArg, t[i], i, t)) {
+              return true;
+            }
           }
-          space = _ColorSpace.get(space);
-          return space.to(this, coords);
-        }
-      }, {
-        key: 'toString',
-        value: function toString() {
-          return ''.concat(this.name, ' (').concat(this.id, ')');
+          return false;
         }
-      }, {
-        key: 'getMinCoords',
-        value: function getMinCoords() {
-          var ret = [];
-          for (var id in this.coords) {
-            var _range2$min;
-            var meta = this.coords[id];
-            var range2 = meta.range || meta.refRange;
-            ret.push((_range2$min = range2 === null || range2 === void 0 ? void 0 : range2.min) !== null && _range2$min !== void 0 ? _range2$min : 0);
-          }
-          return ret;
+      });
+    }
+    if (!Array.from) {
+      Array.from = import_from['default'];
+    }
+    if (!String.prototype.includes) {
+      String.prototype.includes = function(search, start) {
+        if (typeof start !== 'number') {
+          start = 0;
         }
-      } ], [ {
-        key: 'all',
-        get: function get() {
-          return _toConsumableArray(new Set(Object.values(_ColorSpace.registry)));
+        if (start + search.length > this.length) {
+          return false;
+        } else {
+          return this.indexOf(search, start) !== -1;
         }
-      }, {
-        key: 'register',
-        value: function register(id, space) {
-          if (arguments.length === 1) {
-            space = arguments[0];
-            id = space.id;
-          }
-          space = this.get(space);
-          if (this.registry[id] && this.registry[id] !== space) {
-            throw new Error('Duplicate color space registration: \''.concat(id, '\''));
-          }
-          this.registry[id] = space;
-          if (arguments.length === 1 && space.aliases) {
-            var _iterator10 = _createForOfIteratorHelper(space.aliases), _step10;
+      };
+    }
+    if (!Array.prototype.flat) {
+      Object.defineProperty(Array.prototype, 'flat', {
+        configurable: true,
+        value: function flat() {
+          var depth = isNaN(arguments[0]) ? 1 : Number(arguments[0]);
+          return depth ? Array.prototype.reduce.call(this, function(acc, cur) {
+            if (Array.isArray(cur)) {
+              acc.push.apply(acc, flat.call(cur, depth - 1));
+            } else {
+              acc.push(cur);
+            }
+            return acc;
+          }, []) : Array.prototype.slice.call(this);
+        },
+        writable: true
+      });
+    }
+    if (window.Node && !('isConnected' in window.Node.prototype)) {
+      Object.defineProperty(window.Node.prototype, 'isConnected', {
+        get: function get() {
+          return !this.ownerDocument || !(this.ownerDocument.compareDocumentPosition(this) & this.DOCUMENT_POSITION_DISCONNECTED);
+        }
+      });
+    }
+    var import_css_selector_parser2 = __toModule(require_lib());
+    var import_dot = __toModule(require_doT());
+    var import_memoizee2 = __toModule(require_memoizee());
+    function multiplyMatrices(A, B) {
+      var m3 = A.length;
+      if (!Array.isArray(A[0])) {
+        A = [ A ];
+      }
+      if (!Array.isArray(B[0])) {
+        B = B.map(function(x) {
+          return [ x ];
+        });
+      }
+      var p2 = B[0].length;
+      var B_cols = B[0].map(function(_, i) {
+        return B.map(function(x) {
+          return x[i];
+        });
+      });
+      var product = A.map(function(row) {
+        return B_cols.map(function(col) {
+          var ret = 0;
+          if (!Array.isArray(row)) {
+            var _iterator6 = _createForOfIteratorHelper(col), _step6;
             try {
-              for (_iterator10.s(); !(_step10 = _iterator10.n()).done; ) {
-                var alias = _step10.value;
-                this.register(alias, space);
+              for (_iterator6.s(); !(_step6 = _iterator6.n()).done; ) {
+                var c4 = _step6.value;
+                ret += row * c4;
               }
             } catch (err) {
-              _iterator10.e(err);
+              _iterator6.e(err);
             } finally {
-              _iterator10.f();
-            }
-          }
-          return space;
-        }
-      }, {
-        key: 'get',
-        value: function get(space) {
-          if (!space || space instanceof _ColorSpace) {
-            return space;
-          }
-          var argType = type(space);
-          if (argType === 'string') {
-            var ret = _ColorSpace.registry[space.toLowerCase()];
-            if (!ret) {
-              throw new TypeError('No color space found with id = "'.concat(space, '"'));
+              _iterator6.f();
             }
             return ret;
           }
-          for (var _len2 = arguments.length, alternatives = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
-            alternatives[_key2 - 1] = arguments[_key2];
-          }
-          if (alternatives.length) {
-            return _ColorSpace.get.apply(_ColorSpace, alternatives);
-          }
-          throw new TypeError(''.concat(space, ' is not a valid color space'));
-        }
-      }, {
-        key: 'resolveCoord',
-        value: function resolveCoord(ref, workingSpace) {
-          var coordType = type(ref);
-          var space, coord;
-          if (coordType === 'string') {
-            if (ref.includes('.')) {
-              var _ref$split = ref.split('.');
-              var _ref$split2 = _slicedToArray(_ref$split, 2);
-              space = _ref$split2[0];
-              coord = _ref$split2[1];
-            } else {
-              space = void 0;
-              coord = ref;
-            }
-          } else if (Array.isArray(ref)) {
-            var _ref47 = _slicedToArray(ref, 2);
-            space = _ref47[0];
-            coord = _ref47[1];
-          } else {
-            space = ref.space;
-            coord = ref.coordId;
-          }
-          space = _ColorSpace.get(space);
-          if (!space) {
-            space = workingSpace;
-          }
-          if (!space) {
-            throw new TypeError('Cannot resolve coordinate reference '.concat(ref, ': No color space specified and relative references are not allowed here'));
-          }
-          coordType = type(coord);
-          if (coordType === 'number' || coordType === 'string' && coord >= 0) {
-            var meta = Object.entries(space.coords)[coord];
-            if (meta) {
-              return _extends({
-                space: space,
-                id: meta[0],
-                index: coord
-              }, meta[1]);
-            }
-          }
-          space = _ColorSpace.get(space);
-          var normalizedCoord = coord.toLowerCase();
-          var i = 0;
-          for (var id in space.coords) {
-            var _meta$name;
-            var _meta = space.coords[id];
-            if (id.toLowerCase() === normalizedCoord || ((_meta$name = _meta.name) === null || _meta$name === void 0 ? void 0 : _meta$name.toLowerCase()) === normalizedCoord) {
-              return _extends({
-                space: space,
-                id: id,
-                index: i
-              }, _meta);
-            }
-            i++;
+          for (var _i11 = 0; _i11 < row.length; _i11++) {
+            ret += row[_i11] * (col[_i11] || 0);
           }
-          throw new TypeError('No "'.concat(coord, '" coordinate found in ').concat(space.name, '. Its coordinates are: ').concat(Object.keys(space.coords).join(', ')));
-        }
-      } ]);
-      return _ColorSpace;
-    }();
-    var ColorSpace = _ColorSpace;
-    __publicField(ColorSpace, 'registry', {});
-    __publicField(ColorSpace, 'DEFAULT_FORMAT', {
-      type: 'functions',
-      name: 'color'
-    });
-    function getPath(space) {
-      var ret = [ space ];
-      for (var s = space; s = s.base; ) {
-        ret.push(s);
+          return ret;
+        });
+      });
+      if (m3 === 1) {
+        product = product[0];
       }
-      return ret;
+      if (p2 === 1) {
+        return product.map(function(x) {
+          return x[0];
+        });
+      }
+      return product;
     }
-    function processFormat(format) {
-      var _ref48 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, coords = _ref48.coords;
-      if (format.coords && !format.coordGrammar) {
-        format.type || (format.type = 'function');
-        format.name || (format.name = 'color');
-        format.coordGrammar = parseCoordGrammar(format.coords);
-        var coordFormats = Object.entries(coords).map(function(_ref49, i) {
-          var _ref50 = _slicedToArray(_ref49, 2), id = _ref50[0], coordMeta = _ref50[1];
-          var outputType = format.coordGrammar[i][0];
-          var fromRange = coordMeta.range || coordMeta.refRange;
-          var toRange = outputType.range, suffix = '';
-          if (outputType == '') {
-            toRange = [ 0, 100 ];
-            suffix = '%';
-          } else if (outputType == '') {
-            suffix = 'deg';
+    function isString(str) {
+      return type(str) === 'string';
+    }
+    function type(o) {
+      var str = Object.prototype.toString.call(o);
+      return (str.match(/^\[object\s+(.*?)\]$/)[1] || '').toLowerCase();
+    }
+    function toPrecision(n2, precision) {
+      n2 = +n2;
+      precision = +precision;
+      var integerLength = (Math.floor(n2) + '').length;
+      if (precision > integerLength) {
+        return +n2.toFixed(precision - integerLength);
+      } else {
+        var p10 = Math.pow(10, integerLength - precision);
+        return Math.round(n2 / p10) * p10;
+      }
+    }
+    function parseFunction(str) {
+      if (!str) {
+        return;
+      }
+      str = str.trim();
+      var isFunctionRegex = /^([a-z]+)\((.+?)\)$/i;
+      var isNumberRegex = /^-?[\d.]+$/;
+      var parts = str.match(isFunctionRegex);
+      if (parts) {
+        var args = [];
+        parts[2].replace(/\/?\s*([-\w.]+(?:%|deg)?)/g, function($0, arg) {
+          if (/%$/.test(arg)) {
+            arg = new Number(arg.slice(0, -1) / 100);
+            arg.type = '';
+          } else if (/deg$/.test(arg)) {
+            arg = new Number(+arg.slice(0, -3));
+            arg.type = '';
+            arg.unit = 'deg';
+          } else if (isNumberRegex.test(arg)) {
+            arg = new Number(arg);
+            arg.type = '';
           }
-          return {
-            fromRange: fromRange,
-            toRange: toRange,
-            suffix: suffix
-          };
+          if ($0.startsWith('/')) {
+            arg = arg instanceof Number ? arg : new Number(arg);
+            arg.alpha = true;
+          }
+          args.push(arg);
         });
-        format.serializeCoords = function(coords2, precision) {
-          return coords2.map(function(c4, i) {
-            var _coordFormats$i = coordFormats[i], fromRange = _coordFormats$i.fromRange, toRange = _coordFormats$i.toRange, suffix = _coordFormats$i.suffix;
-            if (fromRange && toRange) {
-              c4 = mapRange(fromRange, toRange, c4);
-            }
-            c4 = serializeNumber(c4, {
-              precision: precision,
-              unit: suffix
-            });
-            return c4;
-          });
+        return {
+          name: parts[1].toLowerCase(),
+          rawName: parts[1],
+          rawArgs: parts[2],
+          args: args
         };
       }
-      return format;
     }
-    var xyz_d65 = new ColorSpace({
-      id: 'xyz-d65',
-      name: 'XYZ D65',
-      coords: {
-        x: {
-          name: 'X'
-        },
-        y: {
-          name: 'Y'
-        },
-        z: {
-          name: 'Z'
-        }
-      },
-      white: 'D65',
-      formats: {
-        color: {
-          ids: [ 'xyz-d65', 'xyz' ]
-        }
-      },
-      aliases: [ 'xyz' ]
-    });
-    var RGBColorSpace = function(_ColorSpace2) {
-      _inherits(RGBColorSpace, _ColorSpace2);
-      function RGBColorSpace(options) {
-        var _options$referred;
-        var _this;
-        _classCallCheck(this, RGBColorSpace);
-        if (!options.coords) {
-          options.coords = {
-            r: {
-              range: [ 0, 1 ],
-              name: 'Red'
-            },
-            g: {
-              range: [ 0, 1 ],
-              name: 'Green'
-            },
-            b: {
-              range: [ 0, 1 ],
-              name: 'Blue'
-            }
-          };
-        }
-        if (!options.base) {
-          options.base = xyz_d65;
-        }
-        if (options.toXYZ_M && options.fromXYZ_M) {
-          var _options$toBase, _options$fromBase;
-          (_options$toBase = options.toBase) !== null && _options$toBase !== void 0 ? _options$toBase : options.toBase = function(rgb) {
-            var xyz = multiplyMatrices(options.toXYZ_M, rgb);
-            if (_this.white !== _this.base.white) {
-              xyz = adapt$2(_this.white, _this.base.white, xyz);
-            }
-            return xyz;
-          };
-          (_options$fromBase = options.fromBase) !== null && _options$fromBase !== void 0 ? _options$fromBase : options.fromBase = function(xyz) {
-            xyz = adapt$2(_this.base.white, _this.white, xyz);
-            return multiplyMatrices(options.fromXYZ_M, xyz);
-          };
-        }
-        (_options$referred = options.referred) !== null && _options$referred !== void 0 ? _options$referred : options.referred = 'display';
-        return _this = _callSuper(this, RGBColorSpace, [ options ]);
+    function last(arr) {
+      return arr[arr.length - 1];
+    }
+    function interpolate(start, end, p2) {
+      if (isNaN(start)) {
+        return end;
       }
-      return _createClass(RGBColorSpace);
-    }(ColorSpace);
-    function getAll(color, space) {
-      color = getColor(color);
-      if (!space || color.space.equals(space)) {
-        return color.coords.slice();
+      if (isNaN(end)) {
+        return start;
       }
-      space = ColorSpace.get(space);
-      return space.from(color);
+      return start + (end - start) * p2;
     }
-    function get(color, prop) {
-      color = getColor(color);
-      var _ColorSpace$resolveCo = ColorSpace.resolveCoord(prop, color.space), space = _ColorSpace$resolveCo.space, index = _ColorSpace$resolveCo.index;
-      var coords = getAll(color, space);
-      return coords[index];
+    function interpolateInv(start, end, value) {
+      return (value - start) / (end - start);
     }
-    function setAll(color, space, coords) {
-      color = getColor(color);
-      space = ColorSpace.get(space);
-      color.coords = space.to(color.space, coords);
-      return color;
+    function mapRange(from, to2, value) {
+      return interpolate(to2[0], to2[1], interpolateInv(from[0], from[1], value));
     }
-    setAll.returns = 'color';
-    function set(color, prop, value) {
-      color = getColor(color);
-      if (arguments.length === 2 && type(arguments[1]) === 'object') {
-        var object = arguments[1];
-        for (var p2 in object) {
-          set(color, p2, object[p2]);
+    function parseCoordGrammar(coordGrammars) {
+      return coordGrammars.map(function(coordGrammar2) {
+        return coordGrammar2.split('|').map(function(type2) {
+          type2 = type2.trim();
+          var range2 = type2.match(/^(<[a-z]+>)\[(-?[.\d]+),\s*(-?[.\d]+)\]?$/);
+          if (range2) {
+            var ret = new String(range2[1]);
+            ret.range = [ +range2[2], +range2[3] ];
+            return ret;
+          }
+          return type2;
+        });
+      });
+    }
+    var util = Object.freeze({
+      __proto__: null,
+      isString: isString,
+      type: type,
+      toPrecision: toPrecision,
+      parseFunction: parseFunction,
+      last: last,
+      interpolate: interpolate,
+      interpolateInv: interpolateInv,
+      mapRange: mapRange,
+      parseCoordGrammar: parseCoordGrammar,
+      multiplyMatrices: multiplyMatrices
+    });
+    var Hooks = function() {
+      function Hooks() {
+        _classCallCheck(this, Hooks);
+      }
+      return _createClass(Hooks, [ {
+        key: 'add',
+        value: function add(name, callback, first) {
+          if (typeof arguments[0] != 'string') {
+            for (var name in arguments[0]) {
+              this.add(name, arguments[0][name], arguments[1]);
+            }
+            return;
+          }
+          (Array.isArray(name) ? name : [ name ]).forEach(function(name2) {
+            this[name2] = this[name2] || [];
+            if (callback) {
+              this[name2][first ? 'unshift' : 'push'](callback);
+            }
+          }, this);
         }
-      } else {
-        if (typeof value === 'function') {
-          value = value(get(color, prop));
+      }, {
+        key: 'run',
+        value: function run(name, env) {
+          this[name] = this[name] || [];
+          this[name].forEach(function(callback) {
+            callback.call(env && env.context ? env.context : env, env);
+          });
         }
-        var _ColorSpace$resolveCo2 = ColorSpace.resolveCoord(prop, color.space), space = _ColorSpace$resolveCo2.space, index = _ColorSpace$resolveCo2.index;
-        var coords = getAll(color, space);
-        coords[index] = value;
-        setAll(color, space, coords);
+      } ]);
+    }();
+    var hooks = new Hooks();
+    var defaults = {
+      gamut_mapping: 'lch.c',
+      precision: 5,
+      deltaE: '76'
+    };
+    var WHITES = {
+      D50: [ .3457 / .3585, 1, (1 - .3457 - .3585) / .3585 ],
+      D65: [ .3127 / .329, 1, (1 - .3127 - .329) / .329 ]
+    };
+    function getWhite(name) {
+      if (Array.isArray(name)) {
+        return name;
       }
-      return color;
+      return WHITES[name];
     }
-    set.returns = 'color';
-    var XYZ_D50 = new ColorSpace({
-      id: 'xyz-d50',
-      name: 'XYZ D50',
-      white: 'D50',
-      base: xyz_d65,
-      fromBase: function fromBase(coords) {
-        return adapt$2(xyz_d65.white, 'D50', coords);
-      },
-      toBase: function toBase(coords) {
-        return adapt$2('D50', xyz_d65.white, coords);
+    function adapt$1(W1, W2, XYZ) {
+      var options = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
+      W1 = getWhite(W1);
+      W2 = getWhite(W2);
+      if (!W1 || !W2) {
+        throw new TypeError('Missing white point to convert '.concat(!W1 ? 'from' : '').concat(!W1 && !W2 ? '/' : '').concat(!W2 ? 'to' : ''));
       }
-    });
-    var \u03b5$6 = 216 / 24389;
-    var \u03b53$1 = 24 / 116;
-    var \u03ba$4 = 24389 / 27;
-    var white$4 = WHITES.D50;
-    var lab = new ColorSpace({
-      id: 'lab',
-      name: 'Lab',
-      coords: {
-        l: {
-          refRange: [ 0, 100 ],
-          name: 'Lightness'
-        },
-        a: {
-          refRange: [ -125, 125 ]
-        },
-        b: {
-          refRange: [ -125, 125 ]
-        }
-      },
-      white: white$4,
-      base: XYZ_D50,
-      fromBase: function fromBase(XYZ) {
-        var xyz = XYZ.map(function(value, i) {
-          return value / white$4[i];
-        });
-        var f = xyz.map(function(value) {
-          return value > \u03b5$6 ? Math.cbrt(value) : (\u03ba$4 * value + 16) / 116;
-        });
-        return [ 116 * f[1] - 16, 500 * (f[0] - f[1]), 200 * (f[1] - f[2]) ];
-      },
-      toBase: function toBase(Lab) {
-        var f = [];
-        f[1] = (Lab[0] + 16) / 116;
-        f[0] = Lab[1] / 500 + f[1];
-        f[2] = f[1] - Lab[2] / 200;
-        var xyz = [ f[0] > \u03b53$1 ? Math.pow(f[0], 3) : (116 * f[0] - 16) / \u03ba$4, Lab[0] > 8 ? Math.pow((Lab[0] + 16) / 116, 3) : Lab[0] / \u03ba$4, f[2] > \u03b53$1 ? Math.pow(f[2], 3) : (116 * f[2] - 16) / \u03ba$4 ];
-        return xyz.map(function(value, i) {
-          return value * white$4[i];
-        });
-      },
-      formats: {
-        lab: {
-          coords: [ ' | ', ' | [-1,1]', ' | [-1,1]' ]
+      if (W1 === W2) {
+        return XYZ;
+      }
+      var env = {
+        W1: W1,
+        W2: W2,
+        XYZ: XYZ,
+        options: options
+      };
+      hooks.run('chromatic-adaptation-start', env);
+      if (!env.M) {
+        if (env.W1 === WHITES.D65 && env.W2 === WHITES.D50) {
+          env.M = [ [ 1.0479298208405488, .022946793341019088, -.05019222954313557 ], [ .029627815688159344, .990434484573249, -.01707382502938514 ], [ -.009243058152591178, .015055144896577895, .7518742899580008 ] ];
+        } else if (env.W1 === WHITES.D50 && env.W2 === WHITES.D65) {
+          env.M = [ [ .9554734527042182, -.023098536874261423, .0632593086610217 ], [ -.028369706963208136, 1.0099954580058226, .021041398966943008 ], [ .012314001688319899, -.020507696433477912, 1.3303659366080753 ] ];
         }
       }
-    });
-    function constrain(angle) {
-      return (angle % 360 + 360) % 360;
+      hooks.run('chromatic-adaptation-end', env);
+      if (env.M) {
+        return multiplyMatrices(env.M, env.XYZ);
+      } else {
+        throw new TypeError('Only Bradford CAT with white points D50 and D65 supported for now.');
+      }
     }
-    function adjust(arc, angles) {
-      if (arc === 'raw') {
-        return angles;
+    var \u03b5$4 = 75e-6;
+    var _ColorSpace = (_Class_brand = new WeakSet(), _path = new WeakMap(), function() {
+      function _ColorSpace(options) {
+        var _options$coords, _ref39, _options$white, _options$formats, _this$formats$functio, _this$formats, _this$formats2;
+        _classCallCheck(this, _ColorSpace);
+        _classPrivateMethodInitSpec(this, _Class_brand);
+        _classPrivateFieldInitSpec(this, _path, void 0);
+        this.id = options.id;
+        this.name = options.name;
+        this.base = options.base ? _ColorSpace.get(options.base) : null;
+        this.aliases = options.aliases;
+        if (this.base) {
+          this.fromBase = options.fromBase;
+          this.toBase = options.toBase;
+        }
+        var _coords = (_options$coords = options.coords) !== null && _options$coords !== void 0 ? _options$coords : this.base.coords;
+        this.coords = _coords;
+        var white2 = (_ref39 = (_options$white = options.white) !== null && _options$white !== void 0 ? _options$white : this.base.white) !== null && _ref39 !== void 0 ? _ref39 : 'D65';
+        this.white = getWhite(white2);
+        this.formats = (_options$formats = options.formats) !== null && _options$formats !== void 0 ? _options$formats : {};
+        for (var name in this.formats) {
+          var format = this.formats[name];
+          format.type || (format.type = 'function');
+          format.name || (format.name = name);
+        }
+        if (options.cssId && !((_this$formats$functio = this.formats.functions) !== null && _this$formats$functio !== void 0 && _this$formats$functio.color)) {
+          this.formats.color = {
+            id: options.cssId
+          };
+          Object.defineProperty(this, 'cssId', {
+            value: options.cssId
+          });
+        } else if ((_this$formats = this.formats) !== null && _this$formats !== void 0 && _this$formats.color && !((_this$formats2 = this.formats) !== null && _this$formats2 !== void 0 && _this$formats2.color.id)) {
+          this.formats.color.id = this.id;
+        }
+        this.referred = options.referred;
+        _classPrivateFieldSet(_path, this, _assertClassBrand(_Class_brand, this, _getPath).call(this).reverse());
+        hooks.run('colorspace-init-end', this);
       }
-      var _angles$map = angles.map(constrain), _angles$map2 = _slicedToArray(_angles$map, 2), a1 = _angles$map2[0], a2 = _angles$map2[1];
-      var angleDiff = a2 - a1;
-      if (arc === 'increasing') {
-        if (angleDiff < 0) {
-          a2 += 360;
+      return _createClass(_ColorSpace, [ {
+        key: 'inGamut',
+        value: function inGamut(coords) {
+          var _ref40 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref40$epsilon = _ref40.epsilon, epsilon = _ref40$epsilon === void 0 ? \u03b5$4 : _ref40$epsilon;
+          if (this.isPolar) {
+            coords = this.toBase(coords);
+            return this.base.inGamut(coords, {
+              epsilon: epsilon
+            });
+          }
+          var coordMeta = Object.values(this.coords);
+          return coords.every(function(c4, i) {
+            var meta = coordMeta[i];
+            if (meta.type !== 'angle' && meta.range) {
+              if (Number.isNaN(c4)) {
+                return true;
+              }
+              var _meta$range = _slicedToArray(meta.range, 2), min = _meta$range[0], max2 = _meta$range[1];
+              return (min === void 0 || c4 >= min - epsilon) && (max2 === void 0 || c4 <= max2 + epsilon);
+            }
+            return true;
+          });
         }
-      } else if (arc === 'decreasing') {
-        if (angleDiff > 0) {
-          a1 += 360;
+      }, {
+        key: 'cssId',
+        get: function get() {
+          var _this$formats$functio2;
+          return ((_this$formats$functio2 = this.formats.functions) === null || _this$formats$functio2 === void 0 || (_this$formats$functio2 = _this$formats$functio2.color) === null || _this$formats$functio2 === void 0 ? void 0 : _this$formats$functio2.id) || this.id;
         }
-      } else if (arc === 'longer') {
-        if (-180 < angleDiff && angleDiff < 180) {
-          if (angleDiff > 0) {
-            a1 += 360;
+      }, {
+        key: 'isPolar',
+        get: function get() {
+          for (var id in this.coords) {
+            if (this.coords[id].type === 'angle') {
+              return true;
+            }
+          }
+          return false;
+        }
+      }, {
+        key: 'getFormat',
+        value: function getFormat(format) {
+          if (_typeof(format) === 'object') {
+            format = _assertClassBrand(_Class_brand, this, _processFormat).call(this, format);
+            return format;
+          }
+          var ret;
+          if (format === 'default') {
+            ret = Object.values(this.formats)[0];
           } else {
-            a2 += 360;
+            ret = this.formats[format];
+          }
+          if (ret) {
+            ret = _assertClassBrand(_Class_brand, this, _processFormat).call(this, ret);
+            return ret;
+          }
+          return null;
+        }
+      }, {
+        key: 'to',
+        value: function to(space, coords) {
+          if (arguments.length === 1) {
+            var _ref41 = [ space.space, space.coords ];
+            space = _ref41[0];
+            coords = _ref41[1];
+          }
+          space = _ColorSpace.get(space);
+          if (this === space) {
+            return coords;
+          }
+          coords = coords.map(function(c4) {
+            return Number.isNaN(c4) ? 0 : c4;
+          });
+          var myPath = _classPrivateFieldGet(_path, this);
+          var otherPath = _classPrivateFieldGet(_path, space);
+          var connectionSpace, connectionSpaceIndex;
+          for (var _i12 = 0; _i12 < myPath.length; _i12++) {
+            if (myPath[_i12] === otherPath[_i12]) {
+              connectionSpace = myPath[_i12];
+              connectionSpaceIndex = _i12;
+            } else {
+              break;
+            }
+          }
+          if (!connectionSpace) {
+            throw new Error('Cannot convert between color spaces '.concat(this, ' and ').concat(space, ': no connection space was found'));
+          }
+          for (var _i13 = myPath.length - 1; _i13 > connectionSpaceIndex; _i13--) {
+            coords = myPath[_i13].toBase(coords);
+          }
+          for (var _i14 = connectionSpaceIndex + 1; _i14 < otherPath.length; _i14++) {
+            coords = otherPath[_i14].fromBase(coords);
           }
+          return coords;
         }
-      } else if (arc === 'shorter') {
-        if (angleDiff > 180) {
-          a1 += 360;
-        } else if (angleDiff < -180) {
-          a2 += 360;
+      }, {
+        key: 'from',
+        value: function from(space, coords) {
+          if (arguments.length === 1) {
+            var _ref42 = [ space.space, space.coords ];
+            space = _ref42[0];
+            coords = _ref42[1];
+          }
+          space = _ColorSpace.get(space);
+          return space.to(this, coords);
         }
-      }
-      return [ a1, a2 ];
-    }
-    var lch = new ColorSpace({
-      id: 'lch',
-      name: 'LCH',
-      coords: {
-        l: {
-          refRange: [ 0, 100 ],
-          name: 'Lightness'
-        },
-        c: {
-          refRange: [ 0, 150 ],
-          name: 'Chroma'
-        },
-        h: {
-          refRange: [ 0, 360 ],
-          type: 'angle',
-          name: 'Hue'
+      }, {
+        key: 'toString',
+        value: function toString() {
+          return ''.concat(this.name, ' (').concat(this.id, ')');
         }
-      },
-      base: lab,
-      fromBase: function fromBase(Lab) {
-        var _Lab = _slicedToArray(Lab, 3), L = _Lab[0], a2 = _Lab[1], b2 = _Lab[2];
-        var hue;
-        var \u03b52 = .02;
-        if (Math.abs(a2) < \u03b52 && Math.abs(b2) < \u03b52) {
-          hue = NaN;
-        } else {
-          hue = Math.atan2(b2, a2) * 180 / Math.PI;
+      }, {
+        key: 'getMinCoords',
+        value: function getMinCoords() {
+          var ret = [];
+          for (var id in this.coords) {
+            var _range2$min;
+            var meta = this.coords[id];
+            var range2 = meta.range || meta.refRange;
+            ret.push((_range2$min = range2 === null || range2 === void 0 ? void 0 : range2.min) !== null && _range2$min !== void 0 ? _range2$min : 0);
+          }
+          return ret;
         }
-        return [ L, Math.sqrt(Math.pow(a2, 2) + Math.pow(b2, 2)), constrain(hue) ];
-      },
-      toBase: function toBase(LCH) {
-        var _LCH = _slicedToArray(LCH, 3), Lightness = _LCH[0], Chroma = _LCH[1], Hue = _LCH[2];
-        if (Chroma < 0) {
-          Chroma = 0;
+      } ], [ {
+        key: 'all',
+        get: function get() {
+          return _toConsumableArray(new Set(Object.values(_ColorSpace.registry)));
         }
-        if (isNaN(Hue)) {
-          Hue = 0;
+      }, {
+        key: 'register',
+        value: function register(id, space) {
+          if (arguments.length === 1) {
+            space = arguments[0];
+            id = space.id;
+          }
+          space = this.get(space);
+          if (this.registry[id] && this.registry[id] !== space) {
+            throw new Error('Duplicate color space registration: \''.concat(id, '\''));
+          }
+          this.registry[id] = space;
+          if (arguments.length === 1 && space.aliases) {
+            var _iterator7 = _createForOfIteratorHelper(space.aliases), _step7;
+            try {
+              for (_iterator7.s(); !(_step7 = _iterator7.n()).done; ) {
+                var alias = _step7.value;
+                this.register(alias, space);
+              }
+            } catch (err) {
+              _iterator7.e(err);
+            } finally {
+              _iterator7.f();
+            }
+          }
+          return space;
         }
-        return [ Lightness, Chroma * Math.cos(Hue * Math.PI / 180), Chroma * Math.sin(Hue * Math.PI / 180) ];
-      },
-      formats: {
-        lch: {
-          coords: [ ' | ', ' | ', ' | ' ]
+      }, {
+        key: 'get',
+        value: function get(space) {
+          if (!space || space instanceof _ColorSpace) {
+            return space;
+          }
+          var argType = type(space);
+          if (argType === 'string') {
+            var ret = _ColorSpace.registry[space.toLowerCase()];
+            if (!ret) {
+              throw new TypeError('No color space found with id = "'.concat(space, '"'));
+            }
+            return ret;
+          }
+          for (var _len2 = arguments.length, alternatives = new Array(_len2 > 1 ? _len2 - 1 : 0), _key2 = 1; _key2 < _len2; _key2++) {
+            alternatives[_key2 - 1] = arguments[_key2];
+          }
+          if (alternatives.length) {
+            return _ColorSpace.get.apply(_ColorSpace, alternatives);
+          }
+          throw new TypeError(''.concat(space, ' is not a valid color space'));
         }
-      }
-    });
-    var Gfactor = Math.pow(25, 7);
-    var \u03c0$1 = Math.PI;
-    var r2d = 180 / \u03c0$1;
-    var d2r$1 = \u03c0$1 / 180;
-    function pow7(x) {
-      var x2 = x * x;
-      var x7 = x2 * x2 * x2 * x;
-      return x7;
-    }
-    function deltaE2000(color, sample) {
-      var _ref51 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref51$kL = _ref51.kL, kL = _ref51$kL === void 0 ? 1 : _ref51$kL, _ref51$kC = _ref51.kC, kC = _ref51$kC === void 0 ? 1 : _ref51$kC, _ref51$kH = _ref51.kH, kH = _ref51$kH === void 0 ? 1 : _ref51$kH;
-      var _getColor = getColor([ color, sample ]);
-      var _getColor2 = _slicedToArray(_getColor, 2);
-      color = _getColor2[0];
-      sample = _getColor2[1];
-      var _lab$from = lab.from(color), _lab$from2 = _slicedToArray(_lab$from, 3), L1 = _lab$from2[0], a1 = _lab$from2[1], b1 = _lab$from2[2];
-      var C1 = lch.from(lab, [ L1, a1, b1 ])[1];
-      var _lab$from3 = lab.from(sample), _lab$from4 = _slicedToArray(_lab$from3, 3), L2 = _lab$from4[0], a2 = _lab$from4[1], b2 = _lab$from4[2];
-      var C2 = lch.from(lab, [ L2, a2, b2 ])[1];
-      if (C1 < 0) {
-        C1 = 0;
-      }
-      if (C2 < 0) {
-        C2 = 0;
-      }
-      var Cbar = (C1 + C2) / 2;
-      var C7 = pow7(Cbar);
-      var G = .5 * (1 - Math.sqrt(C7 / (C7 + Gfactor)));
-      var adash1 = (1 + G) * a1;
-      var adash2 = (1 + G) * a2;
-      var Cdash1 = Math.sqrt(Math.pow(adash1, 2) + Math.pow(b1, 2));
-      var Cdash2 = Math.sqrt(Math.pow(adash2, 2) + Math.pow(b2, 2));
-      var h1 = adash1 === 0 && b1 === 0 ? 0 : Math.atan2(b1, adash1);
-      var h2 = adash2 === 0 && b2 === 0 ? 0 : Math.atan2(b2, adash2);
-      if (h1 < 0) {
-        h1 += 2 * \u03c0$1;
-      }
-      if (h2 < 0) {
-        h2 += 2 * \u03c0$1;
-      }
-      h1 *= r2d;
-      h2 *= r2d;
-      var \u0394L = L2 - L1;
-      var \u0394C = Cdash2 - Cdash1;
-      var hdiff = h2 - h1;
-      var hsum = h1 + h2;
-      var habs = Math.abs(hdiff);
-      var \u0394h;
-      if (Cdash1 * Cdash2 === 0) {
-        \u0394h = 0;
-      } else if (habs <= 180) {
-        \u0394h = hdiff;
-      } else if (hdiff > 180) {
-        \u0394h = hdiff - 360;
-      } else if (hdiff < -180) {
-        \u0394h = hdiff + 360;
-      } else {
-        defaults.warn('the unthinkable has happened');
-      }
-      var \u0394H = 2 * Math.sqrt(Cdash2 * Cdash1) * Math.sin(\u0394h * d2r$1 / 2);
-      var Ldash = (L1 + L2) / 2;
-      var Cdash = (Cdash1 + Cdash2) / 2;
-      var Cdash7 = pow7(Cdash);
-      var hdash;
-      if (Cdash1 * Cdash2 === 0) {
-        hdash = hsum;
-      } else if (habs <= 180) {
-        hdash = hsum / 2;
-      } else if (hsum < 360) {
-        hdash = (hsum + 360) / 2;
-      } else {
-        hdash = (hsum - 360) / 2;
-      }
-      var lsq = Math.pow(Ldash - 50, 2);
-      var SL = 1 + .015 * lsq / Math.sqrt(20 + lsq);
-      var SC = 1 + .045 * Cdash;
-      var T = 1;
-      T -= .17 * Math.cos((hdash - 30) * d2r$1);
-      T += .24 * Math.cos(2 * hdash * d2r$1);
-      T += .32 * Math.cos((3 * hdash + 6) * d2r$1);
-      T -= .2 * Math.cos((4 * hdash - 63) * d2r$1);
-      var SH = 1 + .015 * Cdash * T;
-      var \u0394\u03b8 = 30 * Math.exp(-1 * Math.pow((hdash - 275) / 25, 2));
-      var RC = 2 * Math.sqrt(Cdash7 / (Cdash7 + Gfactor));
-      var RT = -1 * Math.sin(2 * \u0394\u03b8 * d2r$1) * RC;
-      var dE = Math.pow(\u0394L / (kL * SL), 2);
-      dE += Math.pow(\u0394C / (kC * SC), 2);
-      dE += Math.pow(\u0394H / (kH * SH), 2);
-      dE += RT * (\u0394C / (kC * SC)) * (\u0394H / (kH * SH));
-      return Math.sqrt(dE);
-    }
-    var XYZtoLMS_M$1 = [ [ .819022437996703, .3619062600528904, -.1288737815209879 ], [ .0329836539323885, .9292868615863434, .0361446663506424 ], [ .0481771893596242, .2642395317527308, .6335478284694309 ] ];
-    var LMStoXYZ_M$1 = [ [ 1.2268798758459243, -.5578149944602171, .2813910456659647 ], [ -.0405757452148008, 1.112286803280317, -.0717110580655164 ], [ -.0763729366746601, -.4214933324022432, 1.5869240198367816 ] ];
-    var LMStoLab_M = [ [ .210454268309314, .7936177747023054, -.0040720430116193 ], [ 1.9779985324311684, -2.42859224204858, .450593709617411 ], [ .0259040424655478, .7827717124575296, -.8086757549230774 ] ];
-    var LabtoLMS_M = [ [ 1, .3963377773761749, .2158037573099136 ], [ 1, -.1055613458156586, -.0638541728258133 ], [ 1, -.0894841775298119, -1.2914855480194092 ] ];
-    var OKLab = new ColorSpace({
-      id: 'oklab',
-      name: 'Oklab',
-      coords: {
-        l: {
-          refRange: [ 0, 1 ],
-          name: 'Lightness'
-        },
-        a: {
-          refRange: [ -.4, .4 ]
-        },
-        b: {
-          refRange: [ -.4, .4 ]
+      }, {
+        key: 'resolveCoord',
+        value: function resolveCoord(ref, workingSpace) {
+          var coordType = type(ref);
+          var space, coord;
+          if (coordType === 'string') {
+            if (ref.includes('.')) {
+              var _ref$split = ref.split('.');
+              var _ref$split2 = _slicedToArray(_ref$split, 2);
+              space = _ref$split2[0];
+              coord = _ref$split2[1];
+            } else {
+              space = void 0;
+              coord = ref;
+            }
+          } else if (Array.isArray(ref)) {
+            var _ref43 = _slicedToArray(ref, 2);
+            space = _ref43[0];
+            coord = _ref43[1];
+          } else {
+            space = ref.space;
+            coord = ref.coordId;
+          }
+          space = _ColorSpace.get(space);
+          if (!space) {
+            space = workingSpace;
+          }
+          if (!space) {
+            throw new TypeError('Cannot resolve coordinate reference '.concat(ref, ': No color space specified and relative references are not allowed here'));
+          }
+          coordType = type(coord);
+          if (coordType === 'number' || coordType === 'string' && coord >= 0) {
+            var meta = Object.entries(space.coords)[coord];
+            if (meta) {
+              return _extends({
+                space: space,
+                id: meta[0],
+                index: coord
+              }, meta[1]);
+            }
+          }
+          space = _ColorSpace.get(space);
+          var normalizedCoord = coord.toLowerCase();
+          var i = 0;
+          for (var id in space.coords) {
+            var _meta$name;
+            var _meta = space.coords[id];
+            if (id.toLowerCase() === normalizedCoord || ((_meta$name = _meta.name) === null || _meta$name === void 0 ? void 0 : _meta$name.toLowerCase()) === normalizedCoord) {
+              return _extends({
+                space: space,
+                id: id,
+                index: i
+              }, _meta);
+            }
+            i++;
+          }
+          throw new TypeError('No "'.concat(coord, '" coordinate found in ').concat(space.name, '. Its coordinates are: ').concat(Object.keys(space.coords).join(', ')));
         }
-      },
-      white: 'D65',
-      base: xyz_d65,
-      fromBase: function fromBase(XYZ) {
-        var LMS = multiplyMatrices(XYZtoLMS_M$1, XYZ);
-        var LMSg = LMS.map(function(val) {
-          return Math.cbrt(val);
-        });
-        return multiplyMatrices(LMStoLab_M, LMSg);
-      },
-      toBase: function toBase(OKLab2) {
-        var LMSg = multiplyMatrices(LabtoLMS_M, OKLab2);
-        var LMS = LMSg.map(function(val) {
-          return Math.pow(val, 3);
+      } ]);
+    }());
+    function _processFormat(format) {
+      if (format.coords && !format.coordGrammar) {
+        format.type || (format.type = 'function');
+        format.name || (format.name = 'color');
+        format.coordGrammar = parseCoordGrammar(format.coords);
+        var coordFormats = Object.entries(this.coords).map(function(_ref151, i) {
+          var _ref152 = _slicedToArray(_ref151, 2), id = _ref152[0], coordMeta = _ref152[1];
+          var outputType = format.coordGrammar[i][0];
+          var fromRange = coordMeta.range || coordMeta.refRange;
+          var toRange = outputType.range, suffix = '';
+          if (outputType == '') {
+            toRange = [ 0, 100 ];
+            suffix = '%';
+          } else if (outputType == '') {
+            suffix = 'deg';
+          }
+          return {
+            fromRange: fromRange,
+            toRange: toRange,
+            suffix: suffix
+          };
         });
-        return multiplyMatrices(LMStoXYZ_M$1, LMS);
-      },
-      formats: {
-        oklab: {
-          coords: [ ' | ', ' | [-1,1]', ' | [-1,1]' ]
-        }
-      }
-    });
-    function deltaEOK(color, sample) {
-      var _getColor3 = getColor([ color, sample ]);
-      var _getColor4 = _slicedToArray(_getColor3, 2);
-      color = _getColor4[0];
-      sample = _getColor4[1];
-      var _OKLab$from = OKLab.from(color), _OKLab$from2 = _slicedToArray(_OKLab$from, 3), L1 = _OKLab$from2[0], a1 = _OKLab$from2[1], b1 = _OKLab$from2[2];
-      var _OKLab$from3 = OKLab.from(sample), _OKLab$from4 = _slicedToArray(_OKLab$from3, 3), L2 = _OKLab$from4[0], a2 = _OKLab$from4[1], b2 = _OKLab$from4[2];
-      var \u0394L = L1 - L2;
-      var \u0394a = a1 - a2;
-      var \u0394b = b1 - b2;
-      return Math.sqrt(Math.pow(\u0394L, 2) + Math.pow(\u0394a, 2) + Math.pow(\u0394b, 2));
-    }
-    var \u03b5$5 = 75e-6;
-    function inGamut(color, space) {
-      var _ref52 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref52$epsilon = _ref52.epsilon, epsilon = _ref52$epsilon === void 0 ? \u03b5$5 : _ref52$epsilon;
-      color = getColor(color);
-      if (!space) {
-        space = color.space;
-      }
-      space = ColorSpace.get(space);
-      var coords = color.coords;
-      if (space !== color.space) {
-        coords = space.from(color);
+        format.serializeCoords = function(coords, precision) {
+          return coords.map(function(c4, i) {
+            var _coordFormats$i = coordFormats[i], fromRange = _coordFormats$i.fromRange, toRange = _coordFormats$i.toRange, suffix = _coordFormats$i.suffix;
+            if (fromRange && toRange) {
+              c4 = mapRange(fromRange, toRange, c4);
+            }
+            c4 = toPrecision(c4, precision);
+            if (suffix) {
+              c4 += suffix;
+            }
+            return c4;
+          });
+        };
       }
-      return space.inGamut(coords, {
-        epsilon: epsilon
-      });
-    }
-    function clone2(color) {
-      return {
-        space: color.space,
-        coords: color.coords.slice(),
-        alpha: color.alpha
-      };
-    }
-    function distance(color1, color2) {
-      var space = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 'lab';
-      space = ColorSpace.get(space);
-      var coords1 = space.from(color1);
-      var coords2 = space.from(color2);
-      return Math.sqrt(coords1.reduce(function(acc, c12, i) {
-        var c22 = coords2[i];
-        if (isNaN(c12) || isNaN(c22)) {
-          return acc;
-        }
-        return acc + Math.pow(c22 - c12, 2);
-      }, 0));
-    }
-    function deltaE76(color, sample) {
-      return distance(color, sample, 'lab');
+      return format;
     }
-    var \u03c0 = Math.PI;
-    var d2r = \u03c0 / 180;
-    function deltaECMC(color, sample) {
-      var _ref53 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref53$l = _ref53.l, l = _ref53$l === void 0 ? 2 : _ref53$l, _ref53$c = _ref53.c, c4 = _ref53$c === void 0 ? 1 : _ref53$c;
-      var _getColor5 = getColor([ color, sample ]);
-      var _getColor6 = _slicedToArray(_getColor5, 2);
-      color = _getColor6[0];
-      sample = _getColor6[1];
-      var _lab$from5 = lab.from(color), _lab$from6 = _slicedToArray(_lab$from5, 3), L1 = _lab$from6[0], a1 = _lab$from6[1], b1 = _lab$from6[2];
-      var _lch$from = lch.from(lab, [ L1, a1, b1 ]), _lch$from2 = _slicedToArray(_lch$from, 3), C1 = _lch$from2[1], H1 = _lch$from2[2];
-      var _lab$from7 = lab.from(sample), _lab$from8 = _slicedToArray(_lab$from7, 3), L2 = _lab$from8[0], a2 = _lab$from8[1], b2 = _lab$from8[2];
-      var C2 = lch.from(lab, [ L2, a2, b2 ])[1];
-      if (C1 < 0) {
-        C1 = 0;
-      }
-      if (C2 < 0) {
-        C2 = 0;
-      }
-      var \u0394L = L1 - L2;
-      var \u0394C = C1 - C2;
-      var \u0394a = a1 - a2;
-      var \u0394b = b1 - b2;
-      var H2 = Math.pow(\u0394a, 2) + Math.pow(\u0394b, 2) - Math.pow(\u0394C, 2);
-      var SL = .511;
-      if (L1 >= 16) {
-        SL = .040975 * L1 / (1 + .01765 * L1);
+    function _getPath() {
+      var ret = [ this ];
+      for (var _space2 = this; _space2 = _space2.base; ) {
+        ret.push(_space2);
       }
-      var SC = .0638 * C1 / (1 + .0131 * C1) + .638;
-      var T;
-      if (Number.isNaN(H1)) {
-        H1 = 0;
-      }
-      if (H1 >= 164 && H1 <= 345) {
-        T = .56 + Math.abs(.2 * Math.cos((H1 + 168) * d2r));
-      } else {
-        T = .36 + Math.abs(.4 * Math.cos((H1 + 35) * d2r));
-      }
-      var C4 = Math.pow(C1, 4);
-      var F = Math.sqrt(C4 / (C4 + 1900));
-      var SH = SC * (F * T + 1 - F);
-      var dE = Math.pow(\u0394L / (l * SL), 2);
-      dE += Math.pow(\u0394C / (c4 * SC), 2);
-      dE += H2 / Math.pow(SH, 2);
-      return Math.sqrt(dE);
+      return ret;
     }
-    var Yw$1 = 203;
-    var XYZ_Abs_D65 = new ColorSpace({
-      id: 'xyz-abs-d65',
-      cssId: '--xyz-abs-d65',
-      name: 'Absolute XYZ D65',
-      coords: {
-        x: {
-          refRange: [ 0, 9504.7 ],
-          name: 'Xa'
-        },
-        y: {
-          refRange: [ 0, 1e4 ],
-          name: 'Ya'
-        },
-        z: {
-          refRange: [ 0, 10888.3 ],
-          name: 'Za'
-        }
-      },
-      base: xyz_d65,
-      fromBase: function fromBase(XYZ) {
-        return XYZ.map(function(v) {
-          return Math.max(v * Yw$1, 0);
-        });
-      },
-      toBase: function toBase(AbsXYZ) {
-        return AbsXYZ.map(function(v) {
-          return Math.max(v / Yw$1, 0);
-        });
-      }
+    var ColorSpace = _ColorSpace;
+    __publicField(ColorSpace, 'registry', {});
+    __publicField(ColorSpace, 'DEFAULT_FORMAT', {
+      type: 'functions',
+      name: 'color'
     });
-    var b$1 = 1.15;
-    var g = .66;
-    var n$1 = 2610 / Math.pow(2, 14);
-    var ninv$1 = Math.pow(2, 14) / 2610;
-    var c1$2 = 3424 / Math.pow(2, 12);
-    var c2$2 = 2413 / Math.pow(2, 7);
-    var c3$2 = 2392 / Math.pow(2, 7);
-    var p = 1.7 * 2523 / Math.pow(2, 5);
-    var pinv = Math.pow(2, 5) / (1.7 * 2523);
-    var d = -.56;
-    var d0 = 16295499532821565e-27;
-    var XYZtoCone_M = [ [ .41478972, .579999, .014648 ], [ -.20151, 1.120649, .0531008 ], [ -.0166008, .2648, .6684799 ] ];
-    var ConetoXYZ_M = [ [ 1.9242264357876067, -1.0047923125953657, .037651404030618 ], [ .35031676209499907, .7264811939316552, -.06538442294808501 ], [ -.09098281098284752, -.3127282905230739, 1.5227665613052603 ] ];
-    var ConetoIab_M = [ [ .5, .5, 0 ], [ 3.524, -4.066708, .542708 ], [ .199076, 1.096799, -1.295875 ] ];
-    var IabtoCone_M = [ [ 1, .1386050432715393, .05804731615611886 ], [ .9999999999999999, -.1386050432715393, -.05804731615611886 ], [ .9999999999999998, -.09601924202631895, -.8118918960560388 ] ];
-    var Jzazbz = new ColorSpace({
-      id: 'jzazbz',
-      name: 'Jzazbz',
+    var XYZ_D65 = new ColorSpace({
+      id: 'xyz-d65',
+      name: 'XYZ D65',
       coords: {
-        jz: {
-          refRange: [ 0, 1 ],
-          name: 'Jz'
-        },
-        az: {
-          refRange: [ -.5, .5 ]
+        x: {
+          name: 'X'
         },
-        bz: {
-          refRange: [ -.5, .5 ]
-        }
-      },
-      base: XYZ_Abs_D65,
-      fromBase: function fromBase(XYZ) {
-        var _XYZ = _slicedToArray(XYZ, 3), Xa = _XYZ[0], Ya = _XYZ[1], Za = _XYZ[2];
-        var Xm = b$1 * Xa - (b$1 - 1) * Za;
-        var Ym = g * Ya - (g - 1) * Xa;
-        var LMS = multiplyMatrices(XYZtoCone_M, [ Xm, Ym, Za ]);
-        var PQLMS = LMS.map(function(val) {
-          var num = c1$2 + c2$2 * Math.pow(val / 1e4, n$1);
-          var denom = 1 + c3$2 * Math.pow(val / 1e4, n$1);
-          return Math.pow(num / denom, p);
-        });
-        var _multiplyMatrices = multiplyMatrices(ConetoIab_M, PQLMS), _multiplyMatrices2 = _slicedToArray(_multiplyMatrices, 3), Iz = _multiplyMatrices2[0], az = _multiplyMatrices2[1], bz = _multiplyMatrices2[2];
-        var Jz = (1 + d) * Iz / (1 + d * Iz) - d0;
-        return [ Jz, az, bz ];
-      },
-      toBase: function toBase(Jzazbz2) {
-        var _Jzazbz = _slicedToArray(Jzazbz2, 3), Jz = _Jzazbz[0], az = _Jzazbz[1], bz = _Jzazbz[2];
-        var Iz = (Jz + d0) / (1 + d - d * (Jz + d0));
-        var PQLMS = multiplyMatrices(IabtoCone_M, [ Iz, az, bz ]);
-        var LMS = PQLMS.map(function(val) {
-          var num = c1$2 - Math.pow(val, pinv);
-          var denom = c3$2 * Math.pow(val, pinv) - c2$2;
-          var x = 1e4 * Math.pow(num / denom, ninv$1);
-          return x;
-        });
-        var _multiplyMatrices3 = multiplyMatrices(ConetoXYZ_M, LMS), _multiplyMatrices4 = _slicedToArray(_multiplyMatrices3, 3), Xm = _multiplyMatrices4[0], Ym = _multiplyMatrices4[1], Za = _multiplyMatrices4[2];
-        var Xa = (Xm + (b$1 - 1) * Za) / b$1;
-        var Ya = (Ym + (g - 1) * Xa) / g;
-        return [ Xa, Ya, Za ];
+        y: {
+          name: 'Y'
+        },
+        z: {
+          name: 'Z'
+        }
       },
+      white: 'D65',
       formats: {
         color: {
-          coords: [ ' | ', ' | [-1,1]', ' | [-1,1]' ]
+          ids: [ 'xyz-d65', 'xyz' ]
         }
-      }
+      },
+      aliases: [ 'xyz' ]
     });
-    var jzczhz = new ColorSpace({
-      id: 'jzczhz',
-      name: 'JzCzHz',
-      coords: {
-        jz: {
-          refRange: [ 0, 1 ],
-          name: 'Jz'
-        },
-        cz: {
-          refRange: [ 0, 1 ],
-          name: 'Chroma'
-        },
-        hz: {
-          refRange: [ 0, 360 ],
-          type: 'angle',
-          name: 'Hue'
+    var RGBColorSpace = function(_ColorSpace2) {
+      function RGBColorSpace(options) {
+        var _options$referred;
+        var _this;
+        _classCallCheck(this, RGBColorSpace);
+        if (!options.coords) {
+          options.coords = {
+            r: {
+              range: [ 0, 1 ],
+              name: 'Red'
+            },
+            g: {
+              range: [ 0, 1 ],
+              name: 'Green'
+            },
+            b: {
+              range: [ 0, 1 ],
+              name: 'Blue'
+            }
+          };
         }
-      },
-      base: Jzazbz,
-      fromBase: function fromBase(jzazbz) {
-        var _jzazbz = _slicedToArray(jzazbz, 3), Jz = _jzazbz[0], az = _jzazbz[1], bz = _jzazbz[2];
-        var hue;
-        var \u03b52 = 2e-4;
-        if (Math.abs(az) < \u03b52 && Math.abs(bz) < \u03b52) {
-          hue = NaN;
+        if (!options.base) {
+          options.base = XYZ_D65;
+        }
+        if (options.toXYZ_M && options.fromXYZ_M) {
+          var _options$toBase, _options$fromBase;
+          (_options$toBase = options.toBase) !== null && _options$toBase !== void 0 ? _options$toBase : options.toBase = function(rgb) {
+            var xyz = multiplyMatrices(options.toXYZ_M, rgb);
+            if (_this.white !== _this.base.white) {
+              xyz = adapt$1(_this.white, _this.base.white, xyz);
+            }
+            return xyz;
+          };
+          (_options$fromBase = options.fromBase) !== null && _options$fromBase !== void 0 ? _options$fromBase : options.fromBase = function(xyz) {
+            xyz = adapt$1(_this.base.white, _this.white, xyz);
+            return multiplyMatrices(options.fromXYZ_M, xyz);
+          };
+        }
+        (_options$referred = options.referred) !== null && _options$referred !== void 0 ? _options$referred : options.referred = 'display';
+        return _this = _callSuper(this, RGBColorSpace, [ options ]);
+      }
+      _inherits(RGBColorSpace, _ColorSpace2);
+      return _createClass(RGBColorSpace);
+    }(ColorSpace);
+    function parse2(str) {
+      var _String;
+      var env = {
+        str: (_String = String(str)) === null || _String === void 0 ? void 0 : _String.trim()
+      };
+      hooks.run('parse-start', env);
+      if (env.color) {
+        return env.color;
+      }
+      env.parsed = parseFunction(env.str);
+      if (env.parsed) {
+        var name = env.parsed.name;
+        if (name === 'color') {
+          var id = env.parsed.args.shift();
+          var alpha = env.parsed.rawArgs.indexOf('/') > 0 ? env.parsed.args.pop() : 1;
+          var _iterator8 = _createForOfIteratorHelper(ColorSpace.all), _step8;
+          try {
+            var _loop5 = function _loop5() {
+              var space = _step8.value;
+              var colorSpec = space.getFormat('color');
+              if (colorSpec) {
+                var _colorSpec$ids;
+                if (id === colorSpec.id || (_colorSpec$ids = colorSpec.ids) !== null && _colorSpec$ids !== void 0 && _colorSpec$ids.includes(id)) {
+                  var argCount = Object.keys(space.coords).length;
+                  var coords = Array(argCount).fill(0);
+                  coords.forEach(function(_, i) {
+                    return coords[i] = env.parsed.args[i] || 0;
+                  });
+                  return {
+                    v: {
+                      spaceId: space.id,
+                      coords: coords,
+                      alpha: alpha
+                    }
+                  };
+                }
+              }
+            }, _ret2;
+            for (_iterator8.s(); !(_step8 = _iterator8.n()).done; ) {
+              _ret2 = _loop5();
+              if (_ret2) {
+                return _ret2.v;
+              }
+            }
+          } catch (err) {
+            _iterator8.e(err);
+          } finally {
+            _iterator8.f();
+          }
+          var didYouMean = '';
+          if (id in ColorSpace.registry) {
+            var _ColorSpace$registry$;
+            var cssId = (_ColorSpace$registry$ = ColorSpace.registry[id].formats) === null || _ColorSpace$registry$ === void 0 || (_ColorSpace$registry$ = _ColorSpace$registry$.functions) === null || _ColorSpace$registry$ === void 0 || (_ColorSpace$registry$ = _ColorSpace$registry$.color) === null || _ColorSpace$registry$ === void 0 ? void 0 : _ColorSpace$registry$.id;
+            if (cssId) {
+              didYouMean = 'Did you mean color('.concat(cssId, ')?');
+            }
+          }
+          throw new TypeError('Cannot parse color('.concat(id, '). ') + (didYouMean || 'Missing a plugin?'));
         } else {
-          hue = Math.atan2(bz, az) * 180 / Math.PI;
+          var _iterator9 = _createForOfIteratorHelper(ColorSpace.all), _step9;
+          try {
+            var _loop6 = function _loop6() {
+              var space = _step9.value;
+              var format = space.getFormat(name);
+              if (format && format.type === 'function') {
+                var _alpha = 1;
+                if (format.lastAlpha || last(env.parsed.args).alpha) {
+                  _alpha = env.parsed.args.pop();
+                }
+                var coords = env.parsed.args;
+                if (format.coordGrammar) {
+                  Object.entries(space.coords).forEach(function(_ref44, i) {
+                    var _coords$i;
+                    var _ref45 = _slicedToArray(_ref44, 2), id = _ref45[0], coordMeta = _ref45[1];
+                    var coordGrammar2 = format.coordGrammar[i];
+                    var providedType = (_coords$i = coords[i]) === null || _coords$i === void 0 ? void 0 : _coords$i.type;
+                    coordGrammar2 = coordGrammar2.find(function(c4) {
+                      return c4 == providedType;
+                    });
+                    if (!coordGrammar2) {
+                      var coordName = coordMeta.name || id;
+                      throw new TypeError(''.concat(providedType, ' not allowed for ').concat(coordName, ' in ').concat(name, '()'));
+                    }
+                    var fromRange = coordGrammar2.range;
+                    if (providedType === '') {
+                      fromRange || (fromRange = [ 0, 1 ]);
+                    }
+                    var toRange = coordMeta.range || coordMeta.refRange;
+                    if (fromRange && toRange) {
+                      coords[i] = mapRange(fromRange, toRange, coords[i]);
+                    }
+                  });
+                }
+                return {
+                  v: {
+                    spaceId: space.id,
+                    coords: coords,
+                    alpha: _alpha
+                  }
+                };
+              }
+            }, _ret3;
+            for (_iterator9.s(); !(_step9 = _iterator9.n()).done; ) {
+              _ret3 = _loop6();
+              if (_ret3) {
+                return _ret3.v;
+              }
+            }
+          } catch (err) {
+            _iterator9.e(err);
+          } finally {
+            _iterator9.f();
+          }
+        }
+      } else {
+        var _iterator10 = _createForOfIteratorHelper(ColorSpace.all), _step10;
+        try {
+          for (_iterator10.s(); !(_step10 = _iterator10.n()).done; ) {
+            var space = _step10.value;
+            for (var formatId in space.formats) {
+              var format = space.formats[formatId];
+              if (format.type !== 'custom') {
+                continue;
+              }
+              if (format.test && !format.test(env.str)) {
+                continue;
+              }
+              var color = format.parse(env.str);
+              if (color) {
+                var _color$alpha;
+                (_color$alpha = color.alpha) !== null && _color$alpha !== void 0 ? _color$alpha : color.alpha = 1;
+                return color;
+              }
+            }
+          }
+        } catch (err) {
+          _iterator10.e(err);
+        } finally {
+          _iterator10.f();
         }
-        return [ Jz, Math.sqrt(Math.pow(az, 2) + Math.pow(bz, 2)), constrain(hue) ];
-      },
-      toBase: function toBase(jzczhz2) {
-        return [ jzczhz2[0], jzczhz2[1] * Math.cos(jzczhz2[2] * Math.PI / 180), jzczhz2[1] * Math.sin(jzczhz2[2] * Math.PI / 180) ];
       }
-    });
-    function deltaEJz(color, sample) {
-      var _getColor7 = getColor([ color, sample ]);
-      var _getColor8 = _slicedToArray(_getColor7, 2);
-      color = _getColor8[0];
-      sample = _getColor8[1];
-      var _jzczhz$from = jzczhz.from(color), _jzczhz$from2 = _slicedToArray(_jzczhz$from, 3), Jz1 = _jzczhz$from2[0], Cz1 = _jzczhz$from2[1], Hz1 = _jzczhz$from2[2];
-      var _jzczhz$from3 = jzczhz.from(sample), _jzczhz$from4 = _slicedToArray(_jzczhz$from3, 3), Jz2 = _jzczhz$from4[0], Cz2 = _jzczhz$from4[1], Hz2 = _jzczhz$from4[2];
-      var \u0394J = Jz1 - Jz2;
-      var \u0394C = Cz1 - Cz2;
-      if (Number.isNaN(Hz1) && Number.isNaN(Hz2)) {
-        Hz1 = 0;
-        Hz2 = 0;
-      } else if (Number.isNaN(Hz1)) {
-        Hz1 = Hz2;
-      } else if (Number.isNaN(Hz2)) {
-        Hz2 = Hz1;
+      throw new TypeError('Could not parse '.concat(str, ' as a color. Missing a plugin?'));
+    }
+    function getColor(color) {
+      if (!color) {
+        throw new TypeError('Empty color reference');
       }
-      var \u0394h = Hz1 - Hz2;
-      var \u0394H = 2 * Math.sqrt(Cz1 * Cz2) * Math.sin(\u0394h / 2 * (Math.PI / 180));
-      return Math.sqrt(Math.pow(\u0394J, 2) + Math.pow(\u0394C, 2) + Math.pow(\u0394H, 2));
+      if (isString(color)) {
+        color = parse2(color);
+      }
+      var space = color.space || color.spaceId;
+      if (!(space instanceof ColorSpace)) {
+        color.space = ColorSpace.get(space);
+      }
+      if (color.alpha === void 0) {
+        color.alpha = 1;
+      }
+      return color;
     }
-    var c1$1 = 3424 / 4096;
-    var c2$1 = 2413 / 128;
-    var c3$1 = 2392 / 128;
-    var m1$1 = 2610 / 16384;
-    var m2 = 2523 / 32;
-    var im1 = 16384 / 2610;
-    var im2 = 32 / 2523;
-    var XYZtoLMS_M = [ [ .3592832590121217, .6976051147779502, -.035891593232029 ], [ -.1920808463704993, 1.100476797037432, .0753748658519118 ], [ .0070797844607479, .0748396662186362, .8433265453898765 ] ];
-    var LMStoIPT_M = [ [ 2048 / 4096, 2048 / 4096, 0 ], [ 6610 / 4096, -13613 / 4096, 7003 / 4096 ], [ 17933 / 4096, -17390 / 4096, -543 / 4096 ] ];
-    var IPTtoLMS_M = [ [ .9999999999999998, .0086090370379328, .111029625003026 ], [ .9999999999999998, -.0086090370379328, -.1110296250030259 ], [ .9999999999999998, .5600313357106791, -.3206271749873188 ] ];
-    var LMStoXYZ_M = [ [ 2.0701522183894223, -1.3263473389671563, .2066510476294053 ], [ .3647385209748072, .6805660249472273, -.0453045459220347 ], [ -.0497472075358123, -.0492609666966131, 1.1880659249923042 ] ];
-    var ictcp = new ColorSpace({
-      id: 'ictcp',
-      name: 'ICTCP',
-      coords: {
-        i: {
-          refRange: [ 0, 1 ],
-          name: 'I'
-        },
-        ct: {
-          refRange: [ -.5, .5 ],
-          name: 'CT'
-        },
-        cp: {
-          refRange: [ -.5, .5 ],
-          name: 'CP'
+    function getAll(color, space) {
+      space = ColorSpace.get(space);
+      return space.from(color);
+    }
+    function get(color, prop) {
+      var _ColorSpace$resolveCo = ColorSpace.resolveCoord(prop, color.space), space = _ColorSpace$resolveCo.space, index = _ColorSpace$resolveCo.index;
+      var coords = getAll(color, space);
+      return coords[index];
+    }
+    function setAll(color, space, coords) {
+      space = ColorSpace.get(space);
+      color.coords = space.to(color.space, coords);
+      return color;
+    }
+    function set(color, prop, value) {
+      color = getColor(color);
+      if (arguments.length === 2 && type(arguments[1]) === 'object') {
+        var object = arguments[1];
+        for (var p2 in object) {
+          set(color, p2, object[p2]);
+        }
+      } else {
+        if (typeof value === 'function') {
+          value = value(get(color, prop));
         }
+        var _ColorSpace$resolveCo2 = ColorSpace.resolveCoord(prop, color.space), space = _ColorSpace$resolveCo2.space, index = _ColorSpace$resolveCo2.index;
+        var coords = getAll(color, space);
+        coords[index] = value;
+        setAll(color, space, coords);
+      }
+      return color;
+    }
+    var XYZ_D50 = new ColorSpace({
+      id: 'xyz-d50',
+      name: 'XYZ D50',
+      white: 'D50',
+      base: XYZ_D65,
+      fromBase: function fromBase(coords) {
+        return adapt$1(XYZ_D65.white, 'D50', coords);
       },
-      base: XYZ_Abs_D65,
-      fromBase: function fromBase(XYZ) {
-        var LMS = multiplyMatrices(XYZtoLMS_M, XYZ);
-        return LMStoICtCp(LMS);
+      toBase: function toBase(coords) {
+        return adapt$1('D50', XYZ_D65.white, coords);
       },
-      toBase: function toBase(ICtCp) {
-        var LMS = ICtCptoLMS(ICtCp);
-        return multiplyMatrices(LMStoXYZ_M, LMS);
+      formats: {
+        color: {}
       }
     });
-    function LMStoICtCp(LMS) {
-      var PQLMS = LMS.map(function(val) {
-        var num = c1$1 + c2$1 * Math.pow(val / 1e4, m1$1);
-        var denom = 1 + c3$1 * Math.pow(val / 1e4, m1$1);
-        return Math.pow(num / denom, m2);
-      });
-      return multiplyMatrices(LMStoIPT_M, PQLMS);
-    }
-    function ICtCptoLMS(ICtCp) {
-      var PQLMS = multiplyMatrices(IPTtoLMS_M, ICtCp);
-      var LMS = PQLMS.map(function(val) {
-        var num = Math.max(Math.pow(val, im2) - c1$1, 0);
-        var denom = c2$1 - c3$1 * Math.pow(val, im2);
-        return 1e4 * Math.pow(num / denom, im1);
-      });
-      return LMS;
-    }
-    function deltaEITP(color, sample) {
-      var _getColor9 = getColor([ color, sample ]);
-      var _getColor10 = _slicedToArray(_getColor9, 2);
-      color = _getColor10[0];
-      sample = _getColor10[1];
-      var _ictcp$from = ictcp.from(color), _ictcp$from2 = _slicedToArray(_ictcp$from, 3), I1 = _ictcp$from2[0], T1 = _ictcp$from2[1], P1 = _ictcp$from2[2];
-      var _ictcp$from3 = ictcp.from(sample), _ictcp$from4 = _slicedToArray(_ictcp$from3, 3), I2 = _ictcp$from4[0], T2 = _ictcp$from4[1], P2 = _ictcp$from4[2];
-      return 720 * Math.sqrt(Math.pow(I1 - I2, 2) + .25 * Math.pow(T1 - T2, 2) + Math.pow(P1 - P2, 2));
-    }
-    var white$3 = WHITES.D65;
-    var adaptedCoef = .42;
-    var adaptedCoefInv = 1 / adaptedCoef;
-    var tau = 2 * Math.PI;
-    var cat16 = [ [ .401288, .650173, -.051461 ], [ -.250268, 1.204414, .045854 ], [ -.002079, .048952, .953127 ] ];
-    var cat16Inv = [ [ 1.8620678550872327, -1.0112546305316843, .14918677544445175 ], [ .38752654323613717, .6214474419314753, -.008973985167612518 ], [ -.015841498849333856, -.03412293802851557, 1.0499644368778496 ] ];
-    var m1 = [ [ 460, 451, 288 ], [ 460, -891, -261 ], [ 460, -220, -6300 ] ];
-    var surroundMap = {
-      dark: [ .8, .525, .8 ],
-      dim: [ .9, .59, .9 ],
-      average: [ 1, .69, 1 ]
-    };
-    var hueQuadMap = {
-      h: [ 20.14, 90, 164.25, 237.53, 380.14 ],
-      e: [ .8, .7, 1, 1.2, .8 ],
-      H: [ 0, 100, 200, 300, 400 ]
-    };
-    var rad2deg = 180 / Math.PI;
-    var deg2rad$1 = Math.PI / 180;
-    function adapt$1(coords, fl) {
-      var temp = coords.map(function(c4) {
-        var x = spow(fl * Math.abs(c4) * .01, adaptedCoef);
-        return 400 * copySign(x, c4) / (x + 27.13);
-      });
-      return temp;
-    }
-    function unadapt(adapted, fl) {
-      var constant = 100 / fl * Math.pow(27.13, adaptedCoefInv);
-      return adapted.map(function(c4) {
-        var cabs = Math.abs(c4);
-        return copySign(constant * spow(cabs / (400 - cabs), adaptedCoefInv), c4);
-      });
-    }
-    function hueQuadrature(h) {
-      var hp = constrain(h);
-      if (hp <= hueQuadMap.h[0]) {
-        hp += 360;
-      }
-      var i = bisectLeft(hueQuadMap.h, hp) - 1;
-      var _hueQuadMap$h$slice = hueQuadMap.h.slice(i, i + 2), _hueQuadMap$h$slice2 = _slicedToArray(_hueQuadMap$h$slice, 2), hi = _hueQuadMap$h$slice2[0], hii = _hueQuadMap$h$slice2[1];
-      var _hueQuadMap$e$slice = hueQuadMap.e.slice(i, i + 2), _hueQuadMap$e$slice2 = _slicedToArray(_hueQuadMap$e$slice, 2), ei = _hueQuadMap$e$slice2[0], eii = _hueQuadMap$e$slice2[1];
-      var Hi = hueQuadMap.H[i];
-      var t = (hp - hi) / ei;
-      return Hi + 100 * t / (t + (hii - hp) / eii);
-    }
-    function invHueQuadrature(H) {
-      var Hp = (H % 400 + 400) % 400;
-      var i = Math.floor(.01 * Hp);
-      Hp = Hp % 100;
-      var _hueQuadMap$h$slice3 = hueQuadMap.h.slice(i, i + 2), _hueQuadMap$h$slice4 = _slicedToArray(_hueQuadMap$h$slice3, 2), hi = _hueQuadMap$h$slice4[0], hii = _hueQuadMap$h$slice4[1];
-      var _hueQuadMap$e$slice3 = hueQuadMap.e.slice(i, i + 2), _hueQuadMap$e$slice4 = _slicedToArray(_hueQuadMap$e$slice3, 2), ei = _hueQuadMap$e$slice4[0], eii = _hueQuadMap$e$slice4[1];
-      return constrain((Hp * (eii * hi - ei * hii) - 100 * hi * eii) / (Hp * (eii - ei) - 100 * eii));
-    }
-    function environment(refWhite, adaptingLuminance, backgroundLuminance, surround, discounting) {
-      var env = {};
-      env.discounting = discounting;
-      env.refWhite = refWhite;
-      env.surround = surround;
-      var xyzW = refWhite.map(function(c4) {
-        return c4 * 100;
-      });
-      env.la = adaptingLuminance;
-      env.yb = backgroundLuminance;
-      var yw = xyzW[1];
-      var rgbW = multiplyMatrices(cat16, xyzW);
-      surround = surroundMap[env.surround];
-      var f = surround[0];
-      env.c = surround[1];
-      env.nc = surround[2];
-      var k = 1 / (5 * env.la + 1);
-      var k4 = Math.pow(k, 4);
-      env.fl = k4 * env.la + .1 * (1 - k4) * (1 - k4) * Math.cbrt(5 * env.la);
-      env.flRoot = Math.pow(env.fl, .25);
-      env.n = env.yb / yw;
-      env.z = 1.48 + Math.sqrt(env.n);
-      env.nbb = .725 * Math.pow(env.n, -.2);
-      env.ncb = env.nbb;
-      var d2 = discounting ? 1 : Math.max(Math.min(f * (1 - 1 / 3.6 * Math.exp((-env.la - 42) / 92)), 1), 0);
-      env.dRgb = rgbW.map(function(c4) {
-        return interpolate(1, yw / c4, d2);
-      });
-      env.dRgbInv = env.dRgb.map(function(c4) {
-        return 1 / c4;
-      });
-      var rgbCW = rgbW.map(function(c4, i) {
-        return c4 * env.dRgb[i];
-      });
-      var rgbAW = adapt$1(rgbCW, env.fl);
-      env.aW = env.nbb * (2 * rgbAW[0] + rgbAW[1] + .05 * rgbAW[2]);
-      return env;
-    }
-    var viewingConditions$1 = environment(white$3, 64 / Math.PI * .2, 20, 'average', false);
-    function fromCam16(cam162, env) {
-      if (!(cam162.J !== void 0 ^ cam162.Q !== void 0)) {
-        throw new Error('Conversion requires one and only one: \'J\' or \'Q\'');
-      }
-      if (!(cam162.C !== void 0 ^ cam162.M !== void 0 ^ cam162.s !== void 0)) {
-        throw new Error('Conversion requires one and only one: \'C\', \'M\' or \'s\'');
-      }
-      if (!(cam162.h !== void 0 ^ cam162.H !== void 0)) {
-        throw new Error('Conversion requires one and only one: \'h\' or \'H\'');
-      }
-      if (cam162.J === 0 || cam162.Q === 0) {
-        return [ 0, 0, 0 ];
-      }
-      var hRad = 0;
-      if (cam162.h !== void 0) {
-        hRad = constrain(cam162.h) * deg2rad$1;
-      } else {
-        hRad = invHueQuadrature(cam162.H) * deg2rad$1;
-      }
-      var cosh = Math.cos(hRad);
-      var sinh = Math.sin(hRad);
-      var Jroot = 0;
-      if (cam162.J !== void 0) {
-        Jroot = spow(cam162.J, 1 / 2) * .1;
-      } else if (cam162.Q !== void 0) {
-        Jroot = .25 * env.c * cam162.Q / ((env.aW + 4) * env.flRoot);
-      }
-      var alpha = 0;
-      if (cam162.C !== void 0) {
-        alpha = cam162.C / Jroot;
-      } else if (cam162.M !== void 0) {
-        alpha = cam162.M / env.flRoot / Jroot;
-      } else if (cam162.s !== void 0) {
-        alpha = 4e-4 * Math.pow(cam162.s, 2) * (env.aW + 4) / env.c;
-      }
-      var t = spow(alpha * Math.pow(1.64 - Math.pow(.29, env.n), -.73), 10 / 9);
-      var et = .25 * (Math.cos(hRad + 2) + 3.8);
-      var A = env.aW * spow(Jroot, 2 / env.c / env.z);
-      var p1 = 5e4 / 13 * env.nc * env.ncb * et;
-      var p2 = A / env.nbb;
-      var r = 23 * (p2 + .305) * zdiv(t, 23 * p1 + t * (11 * cosh + 108 * sinh));
-      var a2 = r * cosh;
-      var b2 = r * sinh;
-      var rgb_c = unadapt(multiplyMatrices(m1, [ p2, a2, b2 ]).map(function(c4) {
-        return c4 * 1 / 1403;
-      }), env.fl);
-      return multiplyMatrices(cat16Inv, rgb_c.map(function(c4, i) {
-        return c4 * env.dRgbInv[i];
-      })).map(function(c4) {
-        return c4 / 100;
-      });
-    }
-    function toCam16(xyzd65, env) {
-      var xyz100 = xyzd65.map(function(c4) {
-        return c4 * 100;
-      });
-      var rgbA = adapt$1(multiplyMatrices(cat16, xyz100).map(function(c4, i) {
-        return c4 * env.dRgb[i];
-      }), env.fl);
-      var a2 = rgbA[0] + (-12 * rgbA[1] + rgbA[2]) / 11;
-      var b2 = (rgbA[0] + rgbA[1] - 2 * rgbA[2]) / 9;
-      var hRad = (Math.atan2(b2, a2) % tau + tau) % tau;
-      var et = .25 * (Math.cos(hRad + 2) + 3.8);
-      var t = 5e4 / 13 * env.nc * env.ncb * zdiv(et * Math.sqrt(Math.pow(a2, 2) + Math.pow(b2, 2)), rgbA[0] + rgbA[1] + 1.05 * rgbA[2] + .305);
-      var alpha = spow(t, .9) * Math.pow(1.64 - Math.pow(.29, env.n), .73);
-      var A = env.nbb * (2 * rgbA[0] + rgbA[1] + .05 * rgbA[2]);
-      var Jroot = spow(A / env.aW, .5 * env.c * env.z);
-      var J = 100 * spow(Jroot, 2);
-      var Q = 4 / env.c * Jroot * (env.aW + 4) * env.flRoot;
-      var C = alpha * Jroot;
-      var M = C * env.flRoot;
-      var h = constrain(hRad * rad2deg);
-      var H = hueQuadrature(h);
-      var s = 50 * spow(env.c * alpha / (env.aW + 4), 1 / 2);
-      return {
-        J: J,
-        C: C,
-        h: h,
-        s: s,
-        Q: Q,
-        M: M,
-        H: H
-      };
-    }
-    var cam16 = new ColorSpace({
-      id: 'cam16-jmh',
-      cssId: '--cam16-jmh',
-      name: 'CAM16-JMh',
+    var \u03b5$3 = 216 / 24389;
+    var \u03b53$1 = 24 / 116;
+    var \u03ba$1 = 24389 / 27;
+    var white$1 = WHITES.D50;
+    var lab = new ColorSpace({
+      id: 'lab',
+      name: 'Lab',
       coords: {
-        j: {
+        l: {
           refRange: [ 0, 100 ],
-          name: 'J'
+          name: 'L'
         },
-        m: {
-          refRange: [ 0, 105 ],
-          name: 'Colorfulness'
+        a: {
+          refRange: [ -125, 125 ]
         },
-        h: {
-          refRange: [ 0, 360 ],
-          type: 'angle',
-          name: 'Hue'
+        b: {
+          refRange: [ -125, 125 ]
         }
       },
-      base: xyz_d65,
-      fromBase: function fromBase(xyz) {
-        var cam162 = toCam16(xyz, viewingConditions$1);
-        return [ cam162.J, cam162.M, cam162.h ];
+      white: white$1,
+      base: XYZ_D50,
+      fromBase: function fromBase(XYZ) {
+        var xyz = XYZ.map(function(value, i) {
+          return value / white$1[i];
+        });
+        var f = xyz.map(function(value) {
+          return value > \u03b5$3 ? Math.cbrt(value) : (\u03ba$1 * value + 16) / 116;
+        });
+        return [ 116 * f[1] - 16, 500 * (f[0] - f[1]), 200 * (f[1] - f[2]) ];
+      },
+      toBase: function toBase(Lab) {
+        var f = [];
+        f[1] = (Lab[0] + 16) / 116;
+        f[0] = Lab[1] / 500 + f[1];
+        f[2] = f[1] - Lab[2] / 200;
+        var xyz = [ f[0] > \u03b53$1 ? Math.pow(f[0], 3) : (116 * f[0] - 16) / \u03ba$1, Lab[0] > 8 ? Math.pow((Lab[0] + 16) / 116, 3) : Lab[0] / \u03ba$1, f[2] > \u03b53$1 ? Math.pow(f[2], 3) : (116 * f[2] - 16) / \u03ba$1 ];
+        return xyz.map(function(value, i) {
+          return value * white$1[i];
+        });
       },
-      toBase: function toBase(cam162) {
-        return fromCam16({
-          J: cam162[0],
-          M: cam162[1],
-          h: cam162[2]
-        }, viewingConditions$1);
+      formats: {
+        lab: {
+          coords: [ ' | ', '', '' ]
+        }
       }
     });
-    var white$2 = WHITES.D65;
-    var \u03b5$4 = 216 / 24389;
-    var \u03ba$3 = 24389 / 27;
-    function toLstar(y) {
-      var fy = y > \u03b5$4 ? Math.cbrt(y) : (\u03ba$3 * y + 16) / 116;
-      return 116 * fy - 16;
-    }
-    function fromLstar(lstar) {
-      return lstar > 8 ? Math.pow((lstar + 16) / 116, 3) : lstar / \u03ba$3;
+    function constrain(angle) {
+      return (angle % 360 + 360) % 360;
     }
-    function fromHct(coords, env) {
-      var _coords2 = _slicedToArray(coords, 3), h = _coords2[0], c4 = _coords2[1], t = _coords2[2];
-      var xyz = [];
-      var j = 0;
-      if (t === 0) {
-        return [ 0, 0, 0 ];
+    function adjust(arc, angles) {
+      if (arc === 'raw') {
+        return angles;
       }
-      var y = fromLstar(t);
-      if (t > 0) {
-        j = .00379058511492914 * Math.pow(t, 2) + .608983189401032 * t + .9155088574762233;
-      } else {
-        j = 9514440756550361e-21 * Math.pow(t, 2) + .08693057439788597 * t - 21.928975842194614;
-      }
-      var threshold = 2e-12;
-      var max_attempts = 15;
-      var attempt = 0;
-      var last2 = Infinity;
-      while (attempt <= max_attempts) {
-        xyz = fromCam16({
-          J: j,
-          C: c4,
-          h: h
-        }, env);
-        var delta = Math.abs(xyz[1] - y);
-        if (delta < last2) {
-          if (delta <= threshold) {
-            return xyz;
+      var _angles$map = angles.map(constrain), _angles$map2 = _slicedToArray(_angles$map, 2), a1 = _angles$map2[0], a2 = _angles$map2[1];
+      var angleDiff = a2 - a1;
+      if (arc === 'increasing') {
+        if (angleDiff < 0) {
+          a2 += 360;
+        }
+      } else if (arc === 'decreasing') {
+        if (angleDiff > 0) {
+          a1 += 360;
+        }
+      } else if (arc === 'longer') {
+        if (-180 < angleDiff && angleDiff < 180) {
+          if (angleDiff > 0) {
+            a2 += 360;
+          } else {
+            a1 += 360;
           }
-          last2 = delta;
         }
-        j = j - (xyz[1] - y) * j / (2 * xyz[1]);
-        attempt += 1;
-      }
-      return fromCam16({
-        J: j,
-        C: c4,
-        h: h
-      }, env);
-    }
-    function toHct(xyz, env) {
-      var t = toLstar(xyz[1]);
-      if (t === 0) {
-        return [ 0, 0, 0 ];
+      } else if (arc === 'shorter') {
+        if (angleDiff > 180) {
+          a1 += 360;
+        } else if (angleDiff < -180) {
+          a2 += 360;
+        }
       }
-      var cam162 = toCam16(xyz, viewingConditions);
-      return [ constrain(cam162.h), cam162.C, t ];
+      return [ a1, a2 ];
     }
-    var viewingConditions = environment(white$2, 200 / Math.PI * fromLstar(50), fromLstar(50) * 100, 'average', false);
-    var hct = new ColorSpace({
-      id: 'hct',
-      name: 'HCT',
+    var lch = new ColorSpace({
+      id: 'lch',
+      name: 'LCH',
       coords: {
+        l: {
+          refRange: [ 0, 100 ],
+          name: 'Lightness'
+        },
+        c: {
+          refRange: [ 0, 150 ],
+          name: 'Chroma'
+        },
         h: {
           refRange: [ 0, 360 ],
           type: 'angle',
           name: 'Hue'
-        },
-        c: {
-          refRange: [ 0, 145 ],
-          name: 'Colorfulness'
-        },
-        t: {
-          refRange: [ 0, 100 ],
-          name: 'Tone'
         }
       },
-      base: xyz_d65,
-      fromBase: function fromBase(xyz) {
-        return toHct(xyz);
+      base: lab,
+      fromBase: function fromBase(Lab) {
+        var _Lab = _slicedToArray(Lab, 3), L = _Lab[0], a2 = _Lab[1], b2 = _Lab[2];
+        var hue;
+        var \u03b52 = .02;
+        if (Math.abs(a2) < \u03b52 && Math.abs(b2) < \u03b52) {
+          hue = NaN;
+        } else {
+          hue = Math.atan2(b2, a2) * 180 / Math.PI;
+        }
+        return [ L, Math.sqrt(Math.pow(a2, 2) + Math.pow(b2, 2)), constrain(hue) ];
       },
-      toBase: function toBase(hct2) {
-        return fromHct(hct2, viewingConditions);
+      toBase: function toBase(LCH) {
+        var _LCH = _slicedToArray(LCH, 3), Lightness = _LCH[0], Chroma = _LCH[1], Hue = _LCH[2];
+        if (Chroma < 0) {
+          Chroma = 0;
+        }
+        if (isNaN(Hue)) {
+          Hue = 0;
+        }
+        return [ Lightness, Chroma * Math.cos(Hue * Math.PI / 180), Chroma * Math.sin(Hue * Math.PI / 180) ];
       },
       formats: {
-        color: {
-          id: '--hct',
-          coords: [ ' | ', ' | ', ' | ' ]
+        lch: {
+          coords: [ ' | ', '', ' | ' ]
         }
       }
     });
-    var deg2rad = Math.PI / 180;
-    var ucsCoeff = [ 1, .007, .0228 ];
-    function convertUcsAb(coords) {
-      if (coords[1] < 0) {
-        coords = hct.fromBase(hct.toBase(coords));
+    var Gfactor = Math.pow(25, 7);
+    var \u03c0$1 = Math.PI;
+    var r2d = 180 / \u03c0$1;
+    var d2r$1 = \u03c0$1 / 180;
+    function deltaE2000(color, sample) {
+      var _ref46 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref46$kL = _ref46.kL, kL = _ref46$kL === void 0 ? 1 : _ref46$kL, _ref46$kC = _ref46.kC, kC = _ref46$kC === void 0 ? 1 : _ref46$kC, _ref46$kH = _ref46.kH, kH = _ref46$kH === void 0 ? 1 : _ref46$kH;
+      var _lab$from = lab.from(color), _lab$from2 = _slicedToArray(_lab$from, 3), L1 = _lab$from2[0], a1 = _lab$from2[1], b1 = _lab$from2[2];
+      var C1 = lch.from(lab, [ L1, a1, b1 ])[1];
+      var _lab$from3 = lab.from(sample), _lab$from4 = _slicedToArray(_lab$from3, 3), L2 = _lab$from4[0], a2 = _lab$from4[1], b2 = _lab$from4[2];
+      var C2 = lch.from(lab, [ L2, a2, b2 ])[1];
+      if (C1 < 0) {
+        C1 = 0;
       }
-      var M = Math.log(Math.max(1 + ucsCoeff[2] * coords[1] * viewingConditions.flRoot, 1)) / ucsCoeff[2];
-      var hrad = coords[0] * deg2rad;
-      var a2 = M * Math.cos(hrad);
-      var b2 = M * Math.sin(hrad);
-      return [ coords[2], a2, b2 ];
-    }
-    function deltaEHCT(color, sample) {
-      var _getColor11 = getColor([ color, sample ]);
-      var _getColor12 = _slicedToArray(_getColor11, 2);
-      color = _getColor12[0];
-      sample = _getColor12[1];
-      var _convertUcsAb = convertUcsAb(hct.from(color)), _convertUcsAb2 = _slicedToArray(_convertUcsAb, 3), t1 = _convertUcsAb2[0], a1 = _convertUcsAb2[1], b1 = _convertUcsAb2[2];
-      var _convertUcsAb3 = convertUcsAb(hct.from(sample)), _convertUcsAb4 = _slicedToArray(_convertUcsAb3, 3), t2 = _convertUcsAb4[0], a2 = _convertUcsAb4[1], b2 = _convertUcsAb4[2];
-      return Math.sqrt(Math.pow(t1 - t2, 2) + Math.pow(a1 - a2, 2) + Math.pow(b1 - b2, 2));
-    }
-    var deltaEMethods = {
-      deltaE76: deltaE76,
-      deltaECMC: deltaECMC,
-      deltaE2000: deltaE2000,
-      deltaEJz: deltaEJz,
-      deltaEITP: deltaEITP,
-      deltaEOK: deltaEOK,
-      deltaEHCT: deltaEHCT
-    };
-    function calcEpsilon(jnd) {
-      var order = !jnd ? 0 : Math.floor(Math.log10(Math.abs(jnd)));
-      return Math.max(parseFloat('1e'.concat(order - 2)), 1e-6);
-    }
-    var GMAPPRESET = {
-      hct: {
-        method: 'hct.c',
-        jnd: 2,
-        deltaEMethod: 'hct',
-        blackWhiteClamp: {}
-      },
-      'hct-tonal': {
-        method: 'hct.c',
-        jnd: 0,
-        deltaEMethod: 'hct',
-        blackWhiteClamp: {
-          channel: 'hct.t',
-          min: 0,
-          max: 100
-        }
+      if (C2 < 0) {
+        C2 = 0;
       }
-    };
-    function toGamut(color) {
-      var _ref54 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref54$method = _ref54.method, method = _ref54$method === void 0 ? defaults.gamut_mapping : _ref54$method, _ref54$space = _ref54.space, space = _ref54$space === void 0 ? void 0 : _ref54$space, _ref54$deltaEMethod = _ref54.deltaEMethod, deltaEMethod = _ref54$deltaEMethod === void 0 ? '' : _ref54$deltaEMethod, _ref54$jnd = _ref54.jnd, jnd = _ref54$jnd === void 0 ? 2 : _ref54$jnd, _ref54$blackWhiteClam = _ref54.blackWhiteClamp, blackWhiteClamp = _ref54$blackWhiteClam === void 0 ? {} : _ref54$blackWhiteClam;
-      color = getColor(color);
-      if (isString(arguments[1])) {
-        space = arguments[1];
-      } else if (!space) {
-        space = color.space;
+      var Cbar = (C1 + C2) / 2;
+      var C7 = Math.pow(Cbar, 7);
+      var G = .5 * (1 - Math.sqrt(C7 / (C7 + Gfactor)));
+      var adash1 = (1 + G) * a1;
+      var adash2 = (1 + G) * a2;
+      var Cdash1 = Math.sqrt(Math.pow(adash1, 2) + Math.pow(b1, 2));
+      var Cdash2 = Math.sqrt(Math.pow(adash2, 2) + Math.pow(b2, 2));
+      var h1 = adash1 === 0 && b1 === 0 ? 0 : Math.atan2(b1, adash1);
+      var h2 = adash2 === 0 && b2 === 0 ? 0 : Math.atan2(b2, adash2);
+      if (h1 < 0) {
+        h1 += 2 * \u03c0$1;
       }
-      space = ColorSpace.get(space);
-      if (inGamut(color, space, {
-        epsilon: 0
-      })) {
-        return color;
+      if (h2 < 0) {
+        h2 += 2 * \u03c0$1;
       }
-      var spaceColor;
-      if (method === 'css') {
-        spaceColor = toGamutCSS(color, {
-          space: space
-        });
+      h1 *= r2d;
+      h2 *= r2d;
+      var \u0394L = L2 - L1;
+      var \u0394C = Cdash2 - Cdash1;
+      var hdiff = h2 - h1;
+      var hsum = h1 + h2;
+      var habs = Math.abs(hdiff);
+      var \u0394h;
+      if (Cdash1 * Cdash2 === 0) {
+        \u0394h = 0;
+      } else if (habs <= 180) {
+        \u0394h = hdiff;
+      } else if (hdiff > 180) {
+        \u0394h = hdiff - 360;
+      } else if (hdiff < -180) {
+        \u0394h = hdiff + 360;
       } else {
-        if (method !== 'clip' && !inGamut(color, space)) {
-          if (Object.prototype.hasOwnProperty.call(GMAPPRESET, method)) {
-            var _GMAPPRESET$method = GMAPPRESET[method];
-            method = _GMAPPRESET$method.method;
-            jnd = _GMAPPRESET$method.jnd;
-            deltaEMethod = _GMAPPRESET$method.deltaEMethod;
-            blackWhiteClamp = _GMAPPRESET$method.blackWhiteClamp;
-          }
-          var de = deltaE2000;
-          if (deltaEMethod !== '') {
-            for (var m3 in deltaEMethods) {
-              if ('deltae' + deltaEMethod.toLowerCase() === m3.toLowerCase()) {
-                de = deltaEMethods[m3];
-                break;
-              }
-            }
-          }
-          var clipped = toGamut(to(color, space), {
-            method: 'clip',
-            space: space
-          });
-          if (de(color, clipped) > jnd) {
-            if (Object.keys(blackWhiteClamp).length === 3) {
-              var channelMeta = ColorSpace.resolveCoord(blackWhiteClamp.channel);
-              var channel = get(to(color, channelMeta.space), channelMeta.id);
-              if (isNone(channel)) {
-                channel = 0;
-              }
-              if (channel >= blackWhiteClamp.max) {
-                return to({
-                  space: 'xyz-d65',
-                  coords: WHITES['D65']
-                }, color.space);
-              } else if (channel <= blackWhiteClamp.min) {
-                return to({
-                  space: 'xyz-d65',
-                  coords: [ 0, 0, 0 ]
-                }, color.space);
-              }
-            }
-            var coordMeta = ColorSpace.resolveCoord(method);
-            var mapSpace = coordMeta.space;
-            var coordId = coordMeta.id;
-            var mappedColor = to(color, mapSpace);
-            mappedColor.coords.forEach(function(c4, i) {
-              if (isNone(c4)) {
-                mappedColor.coords[i] = 0;
-              }
-            });
-            var bounds = coordMeta.range || coordMeta.refRange;
-            var min = bounds[0];
-            var \u03b52 = calcEpsilon(jnd);
-            var low = min;
-            var high = get(mappedColor, coordId);
-            while (high - low > \u03b52) {
-              var clipped2 = clone2(mappedColor);
-              clipped2 = toGamut(clipped2, {
-                space: space,
-                method: 'clip'
-              });
-              var deltaE2 = de(mappedColor, clipped2);
-              if (deltaE2 - jnd < \u03b52) {
-                low = get(mappedColor, coordId);
-              } else {
-                high = get(mappedColor, coordId);
-              }
-              set(mappedColor, coordId, (low + high) / 2);
-            }
-            spaceColor = to(mappedColor, space);
-          } else {
-            spaceColor = clipped;
-          }
-        } else {
-          spaceColor = to(color, space);
-        }
-        if (method === 'clip' || !inGamut(spaceColor, space, {
-          epsilon: 0
-        })) {
-          var _bounds = Object.values(space.coords).map(function(c4) {
-            return c4.range || [];
-          });
-          spaceColor.coords = spaceColor.coords.map(function(c4, i) {
-            var _bounds$i = _slicedToArray(_bounds[i], 2), min = _bounds$i[0], max2 = _bounds$i[1];
-            if (min !== void 0) {
-              c4 = Math.max(min, c4);
-            }
-            if (max2 !== void 0) {
-              c4 = Math.min(c4, max2);
-            }
-            return c4;
-          });
-        }
+        console.log('the unthinkable has happened');
+      }
+      var \u0394H = 2 * Math.sqrt(Cdash2 * Cdash1) * Math.sin(\u0394h * d2r$1 / 2);
+      var Ldash = (L1 + L2) / 2;
+      var Cdash = (Cdash1 + Cdash2) / 2;
+      var Cdash7 = Math.pow(Cdash, 7);
+      var hdash;
+      if (Cdash1 * Cdash2 === 0) {
+        hdash = hsum;
+      } else if (habs <= 180) {
+        hdash = hsum / 2;
+      } else if (hsum < 360) {
+        hdash = (hsum + 360) / 2;
+      } else {
+        hdash = (hsum - 360) / 2;
       }
+      var lsq = Math.pow(Ldash - 50, 2);
+      var SL = 1 + .015 * lsq / Math.sqrt(20 + lsq);
+      var SC = 1 + .045 * Cdash;
+      var T = 1;
+      T -= .17 * Math.cos((hdash - 30) * d2r$1);
+      T += .24 * Math.cos(2 * hdash * d2r$1);
+      T += .32 * Math.cos((3 * hdash + 6) * d2r$1);
+      T -= .2 * Math.cos((4 * hdash - 63) * d2r$1);
+      var SH = 1 + .015 * Cdash * T;
+      var \u0394\u03b8 = 30 * Math.exp(-1 * Math.pow((hdash - 275) / 25, 2));
+      var RC = 2 * Math.sqrt(Cdash7 / (Cdash7 + Gfactor));
+      var RT = -1 * Math.sin(2 * \u0394\u03b8 * d2r$1) * RC;
+      var dE = Math.pow(\u0394L / (kL * SL), 2);
+      dE += Math.pow(\u0394C / (kC * SC), 2);
+      dE += Math.pow(\u0394H / (kH * SH), 2);
+      dE += RT * (\u0394C / (kC * SC)) * (\u0394H / (kH * SH));
+      return Math.sqrt(dE);
+    }
+    var \u03b5$2 = 75e-6;
+    function inGamut(color) {
+      var space = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : color.space;
+      var _ref47 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref47$epsilon = _ref47.epsilon, epsilon = _ref47$epsilon === void 0 ? \u03b5$2 : _ref47$epsilon;
+      color = getColor(color);
+      space = ColorSpace.get(space);
+      var coords = color.coords;
       if (space !== color.space) {
-        spaceColor = to(spaceColor, color.space);
+        coords = space.from(color);
       }
-      color.coords = spaceColor.coords;
-      return color;
+      return space.inGamut(coords, {
+        epsilon: epsilon
+      });
     }
-    toGamut.returns = 'color';
-    var COLORS = {
-      WHITE: {
-        space: OKLab,
-        coords: [ 1, 0, 0 ]
-      },
-      BLACK: {
-        space: OKLab,
-        coords: [ 0, 0, 0 ]
-      }
-    };
-    function toGamutCSS(origin) {
-      var _ref55 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, space = _ref55.space;
-      var JND = .02;
-      var \u03b52 = 1e-4;
-      origin = getColor(origin);
-      if (!space) {
-        space = origin.space;
+    function clone2(color) {
+      return {
+        space: color.space,
+        coords: color.coords.slice(),
+        alpha: color.alpha
+      };
+    }
+    function toGamut(color) {
+      var _ref48 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref48$method = _ref48.method, method = _ref48$method === void 0 ? defaults.gamut_mapping : _ref48$method, _ref48$space = _ref48.space, space = _ref48$space === void 0 ? color.space : _ref48$space;
+      if (isString(arguments[1])) {
+        space = arguments[1];
       }
       space = ColorSpace.get(space);
-      var oklchSpace = ColorSpace.get('oklch');
-      if (space.isUnbounded) {
-        return to(origin, space);
-      }
-      var origin_OKLCH = to(origin, oklchSpace);
-      var L = origin_OKLCH.coords[0];
-      if (L >= 1) {
-        var white2 = to(COLORS.WHITE, space);
-        white2.alpha = origin.alpha;
-        return to(white2, space);
-      }
-      if (L <= 0) {
-        var black = to(COLORS.BLACK, space);
-        black.alpha = origin.alpha;
-        return to(black, space);
-      }
-      if (inGamut(origin_OKLCH, space, {
+      if (inGamut(color, space, {
         epsilon: 0
       })) {
-        return to(origin_OKLCH, space);
+        return color;
       }
-      function clip(_color) {
-        var destColor = to(_color, space);
-        var spaceCoords = Object.values(space.coords);
-        destColor.coords = destColor.coords.map(function(coord, index) {
-          if ('range' in spaceCoords[index]) {
-            var _spaceCoords$index$ra = _slicedToArray(spaceCoords[index].range, 2), min2 = _spaceCoords$index$ra[0], max3 = _spaceCoords$index$ra[1];
-            return clamp(min2, coord, max3);
-          }
-          return coord;
+      var spaceColor = to(color, space);
+      if (method !== 'clip' && !inGamut(color, space)) {
+        var clipped = toGamut(clone2(spaceColor), {
+          method: 'clip',
+          space: space
         });
-        return destColor;
-      }
-      var min = 0;
-      var max2 = origin_OKLCH.coords[1];
-      var min_inGamut = true;
-      var current = clone2(origin_OKLCH);
-      var clipped = clip(current);
-      var E = deltaEOK(clipped, current);
-      if (E < JND) {
-        return clipped;
-      }
-      while (max2 - min > \u03b52) {
-        var chroma = (min + max2) / 2;
-        current.coords[1] = chroma;
-        if (min_inGamut && inGamut(current, space, {
-          epsilon: 0
-        })) {
-          min = chroma;
-        } else {
-          clipped = clip(current);
-          E = deltaEOK(clipped, current);
-          if (E < JND) {
-            if (JND - E < \u03b52) {
-              break;
+        if (deltaE2000(color, clipped) > 2) {
+          var coordMeta = ColorSpace.resolveCoord(method);
+          var mapSpace = coordMeta.space;
+          var coordId = coordMeta.id;
+          var mappedColor = to(spaceColor, mapSpace);
+          var bounds = coordMeta.range || coordMeta.refRange;
+          var min = bounds[0];
+          var \u03b52 = .01;
+          var low = min;
+          var high = get(mappedColor, coordId);
+          while (high - low > \u03b52) {
+            var clipped2 = clone2(mappedColor);
+            clipped2 = toGamut(clipped2, {
+              space: space,
+              method: 'clip'
+            });
+            var deltaE2 = deltaE2000(mappedColor, clipped2);
+            if (deltaE2 - 2 < \u03b52) {
+              low = get(mappedColor, coordId);
             } else {
-              min_inGamut = false;
-              min = chroma;
+              high = get(mappedColor, coordId);
             }
-          } else {
-            max2 = chroma;
+            set(mappedColor, coordId, (low + high) / 2);
           }
+          spaceColor = to(mappedColor, space);
+        } else {
+          spaceColor = clipped;
         }
       }
-      return clipped;
+      if (method === 'clip' || !inGamut(spaceColor, space, {
+        epsilon: 0
+      })) {
+        var _bounds = Object.values(space.coords).map(function(c4) {
+          return c4.range || [];
+        });
+        spaceColor.coords = spaceColor.coords.map(function(c4, i) {
+          var _bounds$i = _slicedToArray(_bounds[i], 2), min = _bounds$i[0], max2 = _bounds$i[1];
+          if (min !== void 0) {
+            c4 = Math.max(min, c4);
+          }
+          if (max2 !== void 0) {
+            c4 = Math.min(c4, max2);
+          }
+          return c4;
+        });
+      }
+      if (space !== color.space) {
+        spaceColor = to(spaceColor, color.space);
+      }
+      color.coords = spaceColor.coords;
+      return color;
     }
+    toGamut.returns = 'color';
     function to(color, space) {
-      var _ref56 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, inGamut2 = _ref56.inGamut;
+      var _ref49 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, inGamut2 = _ref49.inGamut;
       color = getColor(color);
       space = ColorSpace.get(space);
       var coords = space.from(color);
@@ -15686,21 +15513,24 @@
         alpha: color.alpha
       };
       if (inGamut2) {
-        ret = toGamut(ret, inGamut2 === true ? void 0 : inGamut2);
+        ret = toGamut(ret);
       }
       return ret;
     }
     to.returns = 'color';
     function serialize(color) {
-      var _ref58, _color$space$getForma;
-      var _ref57 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var _ref57$precision = _ref57.precision, precision = _ref57$precision === void 0 ? defaults.precision : _ref57$precision, _ref57$format = _ref57.format, format = _ref57$format === void 0 ? 'default' : _ref57$format, _ref57$inGamut = _ref57.inGamut, inGamut$1 = _ref57$inGamut === void 0 ? true : _ref57$inGamut, customOptions = _objectWithoutProperties(_ref57, _excluded9);
+      var _ref51, _color$space$getForma;
+      var _ref50 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var _ref50$precision = _ref50.precision, precision = _ref50$precision === void 0 ? defaults.precision : _ref50$precision, _ref50$format = _ref50.format, format = _ref50$format === void 0 ? 'default' : _ref50$format, _ref50$inGamut = _ref50.inGamut, inGamut$1 = _ref50$inGamut === void 0 ? true : _ref50$inGamut, customOptions = _objectWithoutProperties(_ref50, _excluded9);
       var ret;
       color = getColor(color);
       var formatId = format;
-      format = (_ref58 = (_color$space$getForma = color.space.getFormat(format)) !== null && _color$space$getForma !== void 0 ? _color$space$getForma : color.space.getFormat('default')) !== null && _ref58 !== void 0 ? _ref58 : ColorSpace.DEFAULT_FORMAT;
-      var coords = color.coords.slice();
+      format = (_ref51 = (_color$space$getForma = color.space.getFormat(format)) !== null && _color$space$getForma !== void 0 ? _color$space$getForma : color.space.getFormat('default')) !== null && _ref51 !== void 0 ? _ref51 : ColorSpace.DEFAULT_FORMAT;
       inGamut$1 || (inGamut$1 = format.toGamut);
+      var coords = color.coords;
+      coords = coords.map(function(c4) {
+        return c4 ? c4 : 0;
+      });
       if (inGamut$1 && !inGamut(color)) {
         coords = toGamut(clone2(color), inGamut$1 === true ? void 0 : inGamut$1).coords;
       }
@@ -15718,9 +15548,7 @@
         } else {
           if (precision !== null) {
             coords = coords.map(function(c4) {
-              return serializeNumber(c4, {
-                precision: precision
-              });
+              return toPrecision(c4, precision);
             });
           }
         }
@@ -15732,11 +15560,9 @@
         }
         var alpha = color.alpha;
         if (precision !== null) {
-          alpha = serializeNumber(alpha, {
-            precision: precision
-          });
+          alpha = toPrecision(alpha, precision);
         }
-        var strAlpha = color.alpha >= 1 || format.noAlpha ? '' : ''.concat(format.commas ? ',' : ' /', ' ').concat(alpha);
+        var strAlpha = color.alpha < 1 && !format.noAlpha ? ''.concat(format.commas ? ',' : ' /', ' ').concat(alpha) : '';
         ret = ''.concat(name, '(').concat(args.join(format.commas ? ', ' : ' ')).concat(strAlpha, ')');
       }
       return ret;
@@ -15745,11 +15571,13 @@
     var fromXYZ_M$5 = [ [ 1.716651187971268, -.355670783776392, -.25336628137366 ], [ -.666684351832489, 1.616481236634939, .0157685458139111 ], [ .017639857445311, -.042770613257809, .942103121235474 ] ];
     var REC2020Linear = new RGBColorSpace({
       id: 'rec2020-linear',
-      cssId: '--rec2020-linear',
       name: 'Linear REC.2020',
       white: 'D65',
       toXYZ_M: toXYZ_M$5,
-      fromXYZ_M: fromXYZ_M$5
+      fromXYZ_M: fromXYZ_M$5,
+      formats: {
+        color: {}
+      }
     });
     var \u03b1 = 1.09929682680944;
     var \u03b2 = .018053968510807;
@@ -15772,13 +15600,15 @@
           }
           return 4.5 * val;
         });
+      },
+      formats: {
+        color: {}
       }
     });
     var toXYZ_M$4 = [ [ .4865709486482162, .26566769316909306, .1982172852343625 ], [ .2289745640697488, .6917385218365064, .079286914093745 ], [ 0, .04511338185890264, 1.043944368900976 ] ];
     var fromXYZ_M$4 = [ [ 2.493496911941425, -.9313836179191239, -.40271078445071684 ], [ -.8294889695615747, 1.7626640603183463, .023624685841943577 ], [ .03584583024378447, -.07617238926804182, .9568845240076872 ] ];
     var P3Linear = new RGBColorSpace({
       id: 'p3-linear',
-      cssId: '--display-p3-linear',
       name: 'Linear P3',
       white: 'D65',
       toXYZ_M: toXYZ_M$4,
@@ -15791,7 +15621,10 @@
       name: 'Linear sRGB',
       white: 'D65',
       toXYZ_M: toXYZ_M$3,
-      fromXYZ_M: fromXYZ_M$3
+      fromXYZ_M: fromXYZ_M$3,
+      formats: {
+        color: {}
+      }
     });
     var KEYWORDS = {
       aliceblue: [ 240 / 255, 248 / 255, 1 ],
@@ -15963,7 +15796,7 @@
         return rgb.map(function(val) {
           var sign = val < 0 ? -1 : 1;
           var abs = val * sign;
-          if (abs <= .04045) {
+          if (abs < .04045) {
             return val / 12.92;
           }
           return sign * Math.pow((abs + .055) / 1.055, 2.4);
@@ -16011,7 +15844,7 @@
             };
           },
           serialize: function serialize(coords, alpha) {
-            var _ref59 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref59$collapse = _ref59.collapse, collapse = _ref59$collapse === void 0 ? true : _ref59$collapse;
+            var _ref52 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref52$collapse = _ref52.collapse, collapse = _ref52$collapse === void 0 ? true : _ref52$collapse;
             if (alpha < 1) {
               coords.push(alpha);
             }
@@ -16057,14 +15890,17 @@
     });
     var P3 = new RGBColorSpace({
       id: 'p3',
-      cssId: 'display-p3',
       name: 'P3',
       base: P3Linear,
       fromBase: sRGB.fromBase,
-      toBase: sRGB.toBase
+      toBase: sRGB.toBase,
+      formats: {
+        color: {
+          id: 'display-p3'
+        }
+      }
     });
     defaults.display_space = sRGB;
-    var supportsNone;
     if (typeof CSS !== 'undefined' && CSS.supports) {
       for (var _i15 = 0, _arr2 = [ lab, REC2020, P3 ]; _i15 < _arr2.length; _i15++) {
         var space = _arr2[_i15];
@@ -16082,35 +15918,32 @@
       }
     }
     function _display(color) {
-      var _ref60 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-      var _ref60$space = _ref60.space, space = _ref60$space === void 0 ? defaults.display_space : _ref60$space, options = _objectWithoutProperties(_ref60, _excluded10);
+      var _ref53 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+      var _ref53$space = _ref53.space, space = _ref53$space === void 0 ? defaults.display_space : _ref53$space, options = _objectWithoutProperties(_ref53, _excluded10);
       var ret = serialize(color, options);
       if (typeof CSS === 'undefined' || CSS.supports('color', ret) || !defaults.display_space) {
         ret = new String(ret);
         ret.color = color;
       } else {
-        var fallbackColor = color;
-        var hasNone = color.coords.some(isNone) || isNone(color.alpha);
-        if (hasNone) {
-          var _supportsNone;
-          if (!((_supportsNone = supportsNone) !== null && _supportsNone !== void 0 ? _supportsNone : supportsNone = CSS.supports('color', 'hsl(none 50% 50%)'))) {
-            fallbackColor = clone2(color);
-            fallbackColor.coords = fallbackColor.coords.map(skipNone);
-            fallbackColor.alpha = skipNone(fallbackColor.alpha);
-            ret = serialize(fallbackColor, options);
-            if (CSS.supports('color', ret)) {
-              ret = new String(ret);
-              ret.color = fallbackColor;
-              return ret;
-            }
-          }
-        }
-        fallbackColor = to(fallbackColor, space);
+        var fallbackColor = to(color, space);
         ret = new String(serialize(fallbackColor, options));
         ret.color = fallbackColor;
       }
       return ret;
     }
+    function distance(color1, color2) {
+      var space = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 'lab';
+      space = ColorSpace.get(space);
+      var coords1 = space.from(color1);
+      var coords2 = space.from(color2);
+      return Math.sqrt(coords1.reduce(function(acc, c12, i) {
+        var c22 = coords2[i];
+        if (isNaN(c12) || isNaN(c22)) {
+          return acc;
+        }
+        return acc + Math.pow(c22 - c12, 2);
+      }, 0));
+    }
     function equals(color1, color2) {
       color1 = getColor(color1);
       color2 = getColor(color2);
@@ -16119,10 +15952,10 @@
       });
     }
     function getLuminance(color) {
-      return get(color, [ xyz_d65, 'y' ]);
+      return get(color, [ XYZ_D65, 'y' ]);
     }
     function setLuminance(color, value) {
-      set(color, [ xyz_d65, 'y' ], value);
+      set(color, [ XYZ_D65, 'y' ], value);
     }
     function register$2(Color3) {
       Object.defineProperty(Color3.prototype, 'luminance', {
@@ -16137,8 +15970,8 @@
     var luminance = Object.freeze({
       __proto__: null,
       getLuminance: getLuminance,
-      register: register$2,
-      setLuminance: setLuminance
+      setLuminance: setLuminance,
+      register: register$2
     });
     function contrastWCAG21(color1, color2) {
       color1 = getColor(color1);
@@ -16146,9 +15979,9 @@
       var Y1 = Math.max(getLuminance(color1), 0);
       var Y2 = Math.max(getLuminance(color2), 0);
       if (Y2 > Y1) {
-        var _ref61 = [ Y2, Y1 ];
-        Y1 = _ref61[0];
-        Y2 = _ref61[1];
+        var _ref54 = [ Y2, Y1 ];
+        Y1 = _ref54[0];
+        Y2 = _ref54[1];
       }
       return (Y1 + .05) / (Y2 + .05);
     }
@@ -16222,9 +16055,9 @@
       var Y1 = Math.max(getLuminance(color1), 0);
       var Y2 = Math.max(getLuminance(color2), 0);
       if (Y2 > Y1) {
-        var _ref62 = [ Y2, Y1 ];
-        Y1 = _ref62[0];
-        Y2 = _ref62[1];
+        var _ref55 = [ Y2, Y1 ];
+        Y1 = _ref55[0];
+        Y2 = _ref55[1];
       }
       var denom = Y1 + Y2;
       return denom === 0 ? 0 : (Y1 - Y2) / denom;
@@ -16236,134 +16069,447 @@
       var Y1 = Math.max(getLuminance(color1), 0);
       var Y2 = Math.max(getLuminance(color2), 0);
       if (Y2 > Y1) {
-        var _ref63 = [ Y2, Y1 ];
-        Y1 = _ref63[0];
-        Y2 = _ref63[1];
+        var _ref56 = [ Y2, Y1 ];
+        Y1 = _ref56[0];
+        Y2 = _ref56[1];
+      }
+      return Y2 === 0 ? max : (Y1 - Y2) / Y2;
+    }
+    function contrastLstar(color1, color2) {
+      color1 = getColor(color1);
+      color2 = getColor(color2);
+      var L1 = get(color1, [ lab, 'l' ]);
+      var L2 = get(color2, [ lab, 'l' ]);
+      return Math.abs(L1 - L2);
+    }
+    var \u03b5$1 = 216 / 24389;
+    var \u03b53 = 24 / 116;
+    var \u03ba = 24389 / 27;
+    var white = WHITES.D65;
+    var lab_d65 = new ColorSpace({
+      id: 'lab-d65',
+      name: 'Lab D65',
+      coords: {
+        l: {
+          refRange: [ 0, 100 ],
+          name: 'L'
+        },
+        a: {
+          refRange: [ -125, 125 ]
+        },
+        b: {
+          refRange: [ -125, 125 ]
+        }
+      },
+      white: white,
+      base: XYZ_D65,
+      fromBase: function fromBase(XYZ) {
+        var xyz = XYZ.map(function(value, i) {
+          return value / white[i];
+        });
+        var f = xyz.map(function(value) {
+          return value > \u03b5$1 ? Math.cbrt(value) : (\u03ba * value + 16) / 116;
+        });
+        return [ 116 * f[1] - 16, 500 * (f[0] - f[1]), 200 * (f[1] - f[2]) ];
+      },
+      toBase: function toBase(Lab) {
+        var f = [];
+        f[1] = (Lab[0] + 16) / 116;
+        f[0] = Lab[1] / 500 + f[1];
+        f[2] = f[1] - Lab[2] / 200;
+        var xyz = [ f[0] > \u03b53 ? Math.pow(f[0], 3) : (116 * f[0] - 16) / \u03ba, Lab[0] > 8 ? Math.pow((Lab[0] + 16) / 116, 3) : Lab[0] / \u03ba, f[2] > \u03b53 ? Math.pow(f[2], 3) : (116 * f[2] - 16) / \u03ba ];
+        return xyz.map(function(value, i) {
+          return value * white[i];
+        });
+      },
+      formats: {
+        'lab-d65': {
+          coords: [ ' | ', '', '' ]
+        }
+      }
+    });
+    var phi = Math.pow(5, .5) * .5 + .5;
+    function contrastDeltaPhi(color1, color2) {
+      color1 = getColor(color1);
+      color2 = getColor(color2);
+      var Lstr1 = get(color1, [ lab_d65, 'l' ]);
+      var Lstr2 = get(color2, [ lab_d65, 'l' ]);
+      var deltaPhiStar = Math.abs(Math.pow(Lstr1, phi) - Math.pow(Lstr2, phi));
+      var contrast2 = Math.pow(deltaPhiStar, 1 / phi) * Math.SQRT2 - 40;
+      return contrast2 < 7.5 ? 0 : contrast2;
+    }
+    var contrastMethods = Object.freeze({
+      __proto__: null,
+      contrastWCAG21: contrastWCAG21,
+      contrastAPCA: contrastAPCA,
+      contrastMichelson: contrastMichelson,
+      contrastWeber: contrastWeber,
+      contrastLstar: contrastLstar,
+      contrastDeltaPhi: contrastDeltaPhi
+    });
+    function contrast(background, foreground) {
+      var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
+      if (isString(o)) {
+        o = {
+          algorithm: o
+        };
+      }
+      var _o = o, algorithm = _o.algorithm, rest = _objectWithoutProperties(_o, _excluded11);
+      if (!algorithm) {
+        var algorithms = Object.keys(contrastMethods).map(function(a2) {
+          return a2.replace(/^contrast/, '');
+        }).join(', ');
+        throw new TypeError('contrast() function needs a contrast algorithm. Please specify one of: '.concat(algorithms));
+      }
+      background = getColor(background);
+      foreground = getColor(foreground);
+      for (var a2 in contrastMethods) {
+        if ('contrast' + algorithm.toLowerCase() === a2.toLowerCase()) {
+          return contrastMethods[a2](background, foreground, rest);
+        }
+      }
+      throw new TypeError('Unknown contrast algorithm: '.concat(algorithm));
+    }
+    function uv(color) {
+      var _getAll = getAll(color, XYZ_D65), _getAll2 = _slicedToArray(_getAll, 3), X = _getAll2[0], Y = _getAll2[1], Z = _getAll2[2];
+      var denom = X + 15 * Y + 3 * Z;
+      return [ 4 * X / denom, 9 * Y / denom ];
+    }
+    function xy(color) {
+      var _getAll3 = getAll(color, XYZ_D65), _getAll4 = _slicedToArray(_getAll3, 3), X = _getAll4[0], Y = _getAll4[1], Z = _getAll4[2];
+      var sum = X + Y + Z;
+      return [ X / sum, Y / sum ];
+    }
+    function register$1(Color3) {
+      Object.defineProperty(Color3.prototype, 'uv', {
+        get: function get() {
+          return uv(this);
+        }
+      });
+      Object.defineProperty(Color3.prototype, 'xy', {
+        get: function get() {
+          return xy(this);
+        }
+      });
+    }
+    var chromaticity = Object.freeze({
+      __proto__: null,
+      uv: uv,
+      xy: xy,
+      register: register$1
+    });
+    function deltaE76(color, sample) {
+      return distance(color, sample, 'lab');
+    }
+    var \u03c0 = Math.PI;
+    var d2r = \u03c0 / 180;
+    function deltaECMC(color, sample) {
+      var _ref57 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {}, _ref57$l = _ref57.l, l = _ref57$l === void 0 ? 2 : _ref57$l, _ref57$c = _ref57.c, c4 = _ref57$c === void 0 ? 1 : _ref57$c;
+      var _lab$from5 = lab.from(color), _lab$from6 = _slicedToArray(_lab$from5, 3), L1 = _lab$from6[0], a1 = _lab$from6[1], b1 = _lab$from6[2];
+      var _lch$from = lch.from(lab, [ L1, a1, b1 ]), _lch$from2 = _slicedToArray(_lch$from, 3), C1 = _lch$from2[1], H1 = _lch$from2[2];
+      var _lab$from7 = lab.from(sample), _lab$from8 = _slicedToArray(_lab$from7, 3), L2 = _lab$from8[0], a2 = _lab$from8[1], b2 = _lab$from8[2];
+      var C2 = lch.from(lab, [ L2, a2, b2 ])[1];
+      if (C1 < 0) {
+        C1 = 0;
+      }
+      if (C2 < 0) {
+        C2 = 0;
+      }
+      var \u0394L = L1 - L2;
+      var \u0394C = C1 - C2;
+      var \u0394a = a1 - a2;
+      var \u0394b = b1 - b2;
+      var H2 = Math.pow(\u0394a, 2) + Math.pow(\u0394b, 2) - Math.pow(\u0394C, 2);
+      var SL = .511;
+      if (L1 >= 16) {
+        SL = .040975 * L1 / (1 + .01765 * L1);
+      }
+      var SC = .0638 * C1 / (1 + .0131 * C1) + .638;
+      var T;
+      if (Number.isNaN(H1)) {
+        H1 = 0;
+      }
+      if (H1 >= 164 && H1 <= 345) {
+        T = .56 + Math.abs(.2 * Math.cos((H1 + 168) * d2r));
+      } else {
+        T = .36 + Math.abs(.4 * Math.cos((H1 + 35) * d2r));
+      }
+      var C4 = Math.pow(C1, 4);
+      var F = Math.sqrt(C4 / (C4 + 1900));
+      var SH = SC * (F * T + 1 - F);
+      var dE = Math.pow(\u0394L / (l * SL), 2);
+      dE += Math.pow(\u0394C / (c4 * SC), 2);
+      dE += H2 / Math.pow(SH, 2);
+      return Math.sqrt(dE);
+    }
+    var Yw$1 = 203;
+    var XYZ_Abs_D65 = new ColorSpace({
+      id: 'xyz-abs-d65',
+      name: 'Absolute XYZ D65',
+      coords: {
+        x: {
+          refRange: [ 0, 9504.7 ],
+          name: 'Xa'
+        },
+        y: {
+          refRange: [ 0, 1e4 ],
+          name: 'Ya'
+        },
+        z: {
+          refRange: [ 0, 10888.3 ],
+          name: 'Za'
+        }
+      },
+      base: XYZ_D65,
+      fromBase: function fromBase(XYZ) {
+        return XYZ.map(function(v) {
+          return Math.max(v * Yw$1, 0);
+        });
+      },
+      toBase: function toBase(AbsXYZ) {
+        return AbsXYZ.map(function(v) {
+          return Math.max(v / Yw$1, 0);
+        });
       }
-      return Y2 === 0 ? max : (Y1 - Y2) / Y2;
-    }
-    function contrastLstar(color1, color2) {
-      color1 = getColor(color1);
-      color2 = getColor(color2);
-      var L1 = get(color1, [ lab, 'l' ]);
-      var L2 = get(color2, [ lab, 'l' ]);
-      return Math.abs(L1 - L2);
-    }
-    var \u03b5$3 = 216 / 24389;
-    var \u03b53 = 24 / 116;
-    var \u03ba$2 = 24389 / 27;
-    var white$1 = WHITES.D65;
-    var lab_d65 = new ColorSpace({
-      id: 'lab-d65',
-      name: 'Lab D65',
+    });
+    var b$1 = 1.15;
+    var g = .66;
+    var n$1 = 2610 / Math.pow(2, 14);
+    var ninv$1 = Math.pow(2, 14) / 2610;
+    var c1$2 = 3424 / Math.pow(2, 12);
+    var c2$2 = 2413 / Math.pow(2, 7);
+    var c3$2 = 2392 / Math.pow(2, 7);
+    var p = 1.7 * 2523 / Math.pow(2, 5);
+    var pinv = Math.pow(2, 5) / (1.7 * 2523);
+    var d = -.56;
+    var d0 = 16295499532821565e-27;
+    var XYZtoCone_M = [ [ .41478972, .579999, .014648 ], [ -.20151, 1.120649, .0531008 ], [ -.0166008, .2648, .6684799 ] ];
+    var ConetoXYZ_M = [ [ 1.9242264357876067, -1.0047923125953657, .037651404030618 ], [ .35031676209499907, .7264811939316552, -.06538442294808501 ], [ -.09098281098284752, -.3127282905230739, 1.5227665613052603 ] ];
+    var ConetoIab_M = [ [ .5, .5, 0 ], [ 3.524, -4.066708, .542708 ], [ .199076, 1.096799, -1.295875 ] ];
+    var IabtoCone_M = [ [ 1, .1386050432715393, .05804731615611886 ], [ .9999999999999999, -.1386050432715393, -.05804731615611886 ], [ .9999999999999998, -.09601924202631895, -.8118918960560388 ] ];
+    var Jzazbz = new ColorSpace({
+      id: 'jzazbz',
+      name: 'Jzazbz',
       coords: {
-        l: {
-          refRange: [ 0, 100 ],
-          name: 'Lightness'
+        jz: {
+          refRange: [ 0, 1 ],
+          name: 'Jz'
         },
-        a: {
-          refRange: [ -125, 125 ]
+        az: {
+          refRange: [ -.5, .5 ]
         },
-        b: {
-          refRange: [ -125, 125 ]
+        bz: {
+          refRange: [ -.5, .5 ]
         }
       },
-      white: white$1,
-      base: xyz_d65,
+      base: XYZ_Abs_D65,
       fromBase: function fromBase(XYZ) {
-        var xyz = XYZ.map(function(value, i) {
-          return value / white$1[i];
-        });
-        var f = xyz.map(function(value) {
-          return value > \u03b5$3 ? Math.cbrt(value) : (\u03ba$2 * value + 16) / 116;
+        var _XYZ = _slicedToArray(XYZ, 3), Xa = _XYZ[0], Ya = _XYZ[1], Za = _XYZ[2];
+        var Xm = b$1 * Xa - (b$1 - 1) * Za;
+        var Ym = g * Ya - (g - 1) * Xa;
+        var LMS = multiplyMatrices(XYZtoCone_M, [ Xm, Ym, Za ]);
+        var PQLMS = LMS.map(function(val) {
+          var num = c1$2 + c2$2 * Math.pow(val / 1e4, n$1);
+          var denom = 1 + c3$2 * Math.pow(val / 1e4, n$1);
+          return Math.pow(num / denom, p);
         });
-        return [ 116 * f[1] - 16, 500 * (f[0] - f[1]), 200 * (f[1] - f[2]) ];
+        var _multiplyMatrices = multiplyMatrices(ConetoIab_M, PQLMS), _multiplyMatrices2 = _slicedToArray(_multiplyMatrices, 3), Iz = _multiplyMatrices2[0], az = _multiplyMatrices2[1], bz = _multiplyMatrices2[2];
+        var Jz = (1 + d) * Iz / (1 + d * Iz) - d0;
+        return [ Jz, az, bz ];
       },
-      toBase: function toBase(Lab) {
-        var f = [];
-        f[1] = (Lab[0] + 16) / 116;
-        f[0] = Lab[1] / 500 + f[1];
-        f[2] = f[1] - Lab[2] / 200;
-        var xyz = [ f[0] > \u03b53 ? Math.pow(f[0], 3) : (116 * f[0] - 16) / \u03ba$2, Lab[0] > 8 ? Math.pow((Lab[0] + 16) / 116, 3) : Lab[0] / \u03ba$2, f[2] > \u03b53 ? Math.pow(f[2], 3) : (116 * f[2] - 16) / \u03ba$2 ];
-        return xyz.map(function(value, i) {
-          return value * white$1[i];
+      toBase: function toBase(Jzazbz2) {
+        var _Jzazbz = _slicedToArray(Jzazbz2, 3), Jz = _Jzazbz[0], az = _Jzazbz[1], bz = _Jzazbz[2];
+        var Iz = (Jz + d0) / (1 + d - d * (Jz + d0));
+        var PQLMS = multiplyMatrices(IabtoCone_M, [ Iz, az, bz ]);
+        var LMS = PQLMS.map(function(val) {
+          var num = c1$2 - Math.pow(val, pinv);
+          var denom = c3$2 * Math.pow(val, pinv) - c2$2;
+          var x = 1e4 * Math.pow(num / denom, ninv$1);
+          return x;
         });
+        var _multiplyMatrices3 = multiplyMatrices(ConetoXYZ_M, LMS), _multiplyMatrices4 = _slicedToArray(_multiplyMatrices3, 3), Xm = _multiplyMatrices4[0], Ym = _multiplyMatrices4[1], Za = _multiplyMatrices4[2];
+        var Xa = (Xm + (b$1 - 1) * Za) / b$1;
+        var Ya = (Ym + (g - 1) * Xa) / g;
+        return [ Xa, Ya, Za ];
       },
       formats: {
-        'lab-d65': {
-          coords: [ ' | ', ' | [-1,1]', ' | [-1,1]' ]
-        }
+        color: {}
       }
     });
-    var phi = Math.pow(5, .5) * .5 + .5;
-    function contrastDeltaPhi(color1, color2) {
-      color1 = getColor(color1);
-      color2 = getColor(color2);
-      var Lstr1 = get(color1, [ lab_d65, 'l' ]);
-      var Lstr2 = get(color2, [ lab_d65, 'l' ]);
-      var deltaPhiStar = Math.abs(Math.pow(Lstr1, phi) - Math.pow(Lstr2, phi));
-      var contrast2 = Math.pow(deltaPhiStar, 1 / phi) * Math.SQRT2 - 40;
-      return contrast2 < 7.5 ? 0 : contrast2;
-    }
-    var contrastMethods = Object.freeze({
-      __proto__: null,
-      contrastAPCA: contrastAPCA,
-      contrastDeltaPhi: contrastDeltaPhi,
-      contrastLstar: contrastLstar,
-      contrastMichelson: contrastMichelson,
-      contrastWCAG21: contrastWCAG21,
-      contrastWeber: contrastWeber
-    });
-    function contrast(background, foreground) {
-      var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-      if (isString(o)) {
-        o = {
-          algorithm: o
-        };
+    var jzczhz = new ColorSpace({
+      id: 'jzczhz',
+      name: 'JzCzHz',
+      coords: {
+        jz: {
+          refRange: [ 0, 1 ],
+          name: 'Jz'
+        },
+        cz: {
+          refRange: [ 0, 1 ],
+          name: 'Chroma'
+        },
+        hz: {
+          refRange: [ 0, 360 ],
+          type: 'angle',
+          name: 'Hue'
+        }
+      },
+      base: Jzazbz,
+      fromBase: function fromBase(jzazbz) {
+        var _jzazbz = _slicedToArray(jzazbz, 3), Jz = _jzazbz[0], az = _jzazbz[1], bz = _jzazbz[2];
+        var hue;
+        var \u03b52 = 2e-4;
+        if (Math.abs(az) < \u03b52 && Math.abs(bz) < \u03b52) {
+          hue = NaN;
+        } else {
+          hue = Math.atan2(bz, az) * 180 / Math.PI;
+        }
+        return [ Jz, Math.sqrt(Math.pow(az, 2) + Math.pow(bz, 2)), constrain(hue) ];
+      },
+      toBase: function toBase(jzczhz2) {
+        return [ jzczhz2[0], jzczhz2[1] * Math.cos(jzczhz2[2] * Math.PI / 180), jzczhz2[1] * Math.sin(jzczhz2[2] * Math.PI / 180) ];
+      },
+      formats: {
+        color: {}
       }
-      var _o = o, algorithm = _o.algorithm, rest = _objectWithoutProperties(_o, _excluded11);
-      if (!algorithm) {
-        var algorithms = Object.keys(contrastMethods).map(function(a2) {
-          return a2.replace(/^contrast/, '');
-        }).join(', ');
-        throw new TypeError('contrast() function needs a contrast algorithm. Please specify one of: '.concat(algorithms));
+    });
+    function deltaEJz(color, sample) {
+      var _jzczhz$from = jzczhz.from(color), _jzczhz$from2 = _slicedToArray(_jzczhz$from, 3), Jz1 = _jzczhz$from2[0], Cz1 = _jzczhz$from2[1], Hz1 = _jzczhz$from2[2];
+      var _jzczhz$from3 = jzczhz.from(sample), _jzczhz$from4 = _slicedToArray(_jzczhz$from3, 3), Jz2 = _jzczhz$from4[0], Cz2 = _jzczhz$from4[1], Hz2 = _jzczhz$from4[2];
+      var \u0394J = Jz1 - Jz2;
+      var \u0394C = Cz1 - Cz2;
+      if (Number.isNaN(Hz1) && Number.isNaN(Hz2)) {
+        Hz1 = 0;
+        Hz2 = 0;
+      } else if (Number.isNaN(Hz1)) {
+        Hz1 = Hz2;
+      } else if (Number.isNaN(Hz2)) {
+        Hz2 = Hz1;
       }
-      background = getColor(background);
-      foreground = getColor(foreground);
-      for (var a2 in contrastMethods) {
-        if ('contrast' + algorithm.toLowerCase() === a2.toLowerCase()) {
-          return contrastMethods[a2](background, foreground, rest);
+      var \u0394h = Hz1 - Hz2;
+      var \u0394H = 2 * Math.sqrt(Cz1 * Cz2) * Math.sin(\u0394h / 2 * (Math.PI / 180));
+      return Math.sqrt(Math.pow(\u0394J, 2) + Math.pow(\u0394C, 2) + Math.pow(\u0394H, 2));
+    }
+    var c1$1 = 3424 / 4096;
+    var c2$1 = 2413 / 128;
+    var c3$1 = 2392 / 128;
+    var m1 = 2610 / 16384;
+    var m2 = 2523 / 32;
+    var im1 = 16384 / 2610;
+    var im2 = 32 / 2523;
+    var XYZtoLMS_M$1 = [ [ .3592, .6976, -.0358 ], [ -.1922, 1.1004, .0755 ], [ .007, .0749, .8434 ] ];
+    var LMStoIPT_M = [ [ 2048 / 4096, 2048 / 4096, 0 ], [ 6610 / 4096, -13613 / 4096, 7003 / 4096 ], [ 17933 / 4096, -17390 / 4096, -543 / 4096 ] ];
+    var IPTtoLMS_M = [ [ .9999888965628402, .008605050147287059, .11103437159861648 ], [ 1.00001110343716, -.008605050147287059, -.11103437159861648 ], [ 1.0000320633910054, .56004913547279, -.3206339100541203 ] ];
+    var LMStoXYZ_M$1 = [ [ 2.0701800566956137, -1.326456876103021, .20661600684785517 ], [ .3649882500326575, .6804673628522352, -.04542175307585323 ], [ -.04959554223893211, -.04942116118675749, 1.1879959417328034 ] ];
+    var ictcp = new ColorSpace({
+      id: 'ictcp',
+      name: 'ICTCP',
+      coords: {
+        i: {
+          refRange: [ 0, 1 ],
+          name: 'I'
+        },
+        ct: {
+          refRange: [ -.5, .5 ],
+          name: 'CT'
+        },
+        cp: {
+          refRange: [ -.5, .5 ],
+          name: 'CP'
         }
+      },
+      base: XYZ_Abs_D65,
+      fromBase: function fromBase(XYZ) {
+        var LMS = multiplyMatrices(XYZtoLMS_M$1, XYZ);
+        return LMStoICtCp(LMS);
+      },
+      toBase: function toBase(ICtCp) {
+        var LMS = ICtCptoLMS(ICtCp);
+        return multiplyMatrices(LMStoXYZ_M$1, LMS);
+      },
+      formats: {
+        color: {}
       }
-      throw new TypeError('Unknown contrast algorithm: '.concat(algorithm));
+    });
+    function LMStoICtCp(LMS) {
+      var PQLMS = LMS.map(function(val) {
+        var num = c1$1 + c2$1 * Math.pow(val / 1e4, m1);
+        var denom = 1 + c3$1 * Math.pow(val / 1e4, m1);
+        return Math.pow(num / denom, m2);
+      });
+      return multiplyMatrices(LMStoIPT_M, PQLMS);
     }
-    function uv(color) {
-      var _getAll = getAll(color, xyz_d65), _getAll2 = _slicedToArray(_getAll, 3), X = _getAll2[0], Y = _getAll2[1], Z = _getAll2[2];
-      var denom = X + 15 * Y + 3 * Z;
-      return [ 4 * X / denom, 9 * Y / denom ];
+    function ICtCptoLMS(ICtCp) {
+      var PQLMS = multiplyMatrices(IPTtoLMS_M, ICtCp);
+      var LMS = PQLMS.map(function(val) {
+        var num = Math.max(Math.pow(val, im2) - c1$1, 0);
+        var denom = c2$1 - c3$1 * Math.pow(val, im2);
+        return 1e4 * Math.pow(num / denom, im1);
+      });
+      return LMS;
     }
-    function xy(color) {
-      var _getAll3 = getAll(color, xyz_d65), _getAll4 = _slicedToArray(_getAll3, 3), X = _getAll4[0], Y = _getAll4[1], Z = _getAll4[2];
-      var sum = X + Y + Z;
-      return [ X / sum, Y / sum ];
+    function deltaEITP(color, sample) {
+      var _ictcp$from = ictcp.from(color), _ictcp$from2 = _slicedToArray(_ictcp$from, 3), I1 = _ictcp$from2[0], T1 = _ictcp$from2[1], P1 = _ictcp$from2[2];
+      var _ictcp$from3 = ictcp.from(sample), _ictcp$from4 = _slicedToArray(_ictcp$from3, 3), I2 = _ictcp$from4[0], T2 = _ictcp$from4[1], P2 = _ictcp$from4[2];
+      return 720 * Math.sqrt(Math.pow(I1 - I2, 2) + .25 * Math.pow(T1 - T2, 2) + Math.pow(P1 - P2, 2));
     }
-    function register$1(Color3) {
-      Object.defineProperty(Color3.prototype, 'uv', {
-        get: function get() {
-          return uv(this);
+    var XYZtoLMS_M = [ [ .8190224432164319, .3619062562801221, -.12887378261216414 ], [ .0329836671980271, .9292868468965546, .03614466816999844 ], [ .048177199566046255, .26423952494422764, .6335478258136937 ] ];
+    var LMStoXYZ_M = [ [ 1.2268798733741557, -.5578149965554813, .28139105017721583 ], [ -.04057576262431372, 1.1122868293970594, -.07171106666151701 ], [ -.07637294974672142, -.4214933239627914, 1.5869240244272418 ] ];
+    var LMStoLab_M = [ [ .2104542553, .793617785, -.0040720468 ], [ 1.9779984951, -2.428592205, .4505937099 ], [ .0259040371, .7827717662, -.808675766 ] ];
+    var LabtoLMS_M = [ [ .9999999984505198, .39633779217376786, .2158037580607588 ], [ 1.0000000088817609, -.10556134232365635, -.06385417477170591 ], [ 1.0000000546724108, -.08948418209496575, -1.2914855378640917 ] ];
+    var OKLab = new ColorSpace({
+      id: 'oklab',
+      name: 'OKLab',
+      coords: {
+        l: {
+          refRange: [ 0, 1 ],
+          name: 'L'
+        },
+        a: {
+          refRange: [ -.4, .4 ]
+        },
+        b: {
+          refRange: [ -.4, .4 ]
         }
-      });
-      Object.defineProperty(Color3.prototype, 'xy', {
-        get: function get() {
-          return xy(this);
+      },
+      white: 'D65',
+      base: XYZ_D65,
+      fromBase: function fromBase(XYZ) {
+        var LMS = multiplyMatrices(XYZtoLMS_M, XYZ);
+        var LMSg = LMS.map(function(val) {
+          return Math.cbrt(val);
+        });
+        return multiplyMatrices(LMStoLab_M, LMSg);
+      },
+      toBase: function toBase(OKLab2) {
+        var LMSg = multiplyMatrices(LabtoLMS_M, OKLab2);
+        var LMS = LMSg.map(function(val) {
+          return Math.pow(val, 3);
+        });
+        return multiplyMatrices(LMStoXYZ_M, LMS);
+      },
+      formats: {
+        oklab: {
+          coords: [ ' | ', '', '' ]
         }
-      });
+      }
+    });
+    function deltaEOK(color, sample) {
+      var _OKLab$from = OKLab.from(color), _OKLab$from2 = _slicedToArray(_OKLab$from, 3), L1 = _OKLab$from2[0], a1 = _OKLab$from2[1], b1 = _OKLab$from2[2];
+      var _OKLab$from3 = OKLab.from(sample), _OKLab$from4 = _slicedToArray(_OKLab$from3, 3), L2 = _OKLab$from4[0], a2 = _OKLab$from4[1], b2 = _OKLab$from4[2];
+      var \u0394L = L1 - L2;
+      var \u0394a = a1 - a2;
+      var \u0394b = b1 - b2;
+      return Math.sqrt(Math.pow(\u0394L, 2) + Math.pow(\u0394a, 2) + Math.pow(\u0394b, 2));
     }
-    var chromaticity = Object.freeze({
+    var deltaEMethods = Object.freeze({
       __proto__: null,
-      register: register$1,
-      uv: uv,
-      xy: xy
+      deltaE76: deltaE76,
+      deltaECMC: deltaECMC,
+      deltaE2000: deltaE2000,
+      deltaEJz: deltaEJz,
+      deltaEITP: deltaEITP,
+      deltaEOK: deltaEOK
     });
     function deltaE(c12, c22) {
       var o = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
@@ -16373,6 +16519,8 @@
         };
       }
       var _o2 = o, _o2$method = _o2.method, method = _o2$method === void 0 ? defaults.deltaE : _o2$method, rest = _objectWithoutProperties(_o2, _excluded12);
+      c12 = getColor(c12);
+      c22 = getColor(c22);
       for (var m3 in deltaEMethods) {
         if ('deltae' + method.toLowerCase() === m3.toLowerCase()) {
           return deltaEMethods[m3](c12, c22, rest);
@@ -16398,21 +16546,26 @@
     }
     var variations = Object.freeze({
       __proto__: null,
-      darken: darken,
-      lighten: lighten
+      lighten: lighten,
+      darken: darken
     });
     function mix(c12, c22) {
       var p2 = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : .5;
       var o = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};
-      var _ref64 = [ getColor(c12), getColor(c22) ];
-      c12 = _ref64[0];
-      c22 = _ref64[1];
+      var _ref58 = [ getColor(c12), getColor(c22) ];
+      c12 = _ref58[0];
+      c22 = _ref58[1];
       if (type(p2) === 'object') {
-        var _ref65 = [ .5, p2 ];
-        p2 = _ref65[0];
-        o = _ref65[1];
+        var _ref59 = [ .5, p2 ];
+        p2 = _ref59[0];
+        o = _ref59[1];
       }
-      var r = range(c12, c22, o);
+      var _o3 = o, space = _o3.space, outputSpace = _o3.outputSpace, premultiplied = _o3.premultiplied;
+      var r = range(c12, c22, {
+        space: space,
+        outputSpace: outputSpace,
+        premultiplied: premultiplied
+      });
       return r(p2);
     }
     function steps(c12, c22) {
@@ -16427,9 +16580,9 @@
       }
       var _options = options, maxDeltaE = _options.maxDeltaE, deltaEMethod = _options.deltaEMethod, _options$steps = _options.steps, steps2 = _options$steps === void 0 ? 2 : _options$steps, _options$maxSteps = _options.maxSteps, maxSteps = _options$maxSteps === void 0 ? 1e3 : _options$maxSteps, rangeOptions = _objectWithoutProperties(_options, _excluded13);
       if (!colorRange) {
-        var _ref66 = [ getColor(c12), getColor(c22) ];
-        c12 = _ref66[0];
-        c22 = _ref66[1];
+        var _ref60 = [ getColor(c12), getColor(c22) ];
+        c12 = _ref60[0];
+        c22 = _ref60[1];
         colorRange = range(c12, c22, rangeOptions);
       }
       var totalDelta = deltaE(c12, c22);
@@ -16469,8 +16622,8 @@
             var prev = ret[_i16 - 1];
             var cur = ret[_i16];
             var p2 = (cur.p + prev.p) / 2;
-            var _color2 = colorRange(p2);
-            maxDelta = Math.max(maxDelta, deltaE(_color2, prev.color), deltaE(_color2, cur.color));
+            var _color = colorRange(p2);
+            maxDelta = Math.max(maxDelta, deltaE(_color, prev.color), deltaE(_color, cur.color));
             ret.splice(_i16, 0, {
               p: p2,
               color: colorRange(p2)
@@ -16512,12 +16665,7 @@
       if (space.coords.h && space.coords.h.type === 'angle') {
         var arc = options.hue = options.hue || 'shorter';
         var hue = [ space, 'h' ];
-        var _ref67 = [ get(color1, hue), get(color2, hue) ], \u03b81 = _ref67[0], \u03b82 = _ref67[1];
-        if (isNaN(\u03b81) && !isNaN(\u03b82)) {
-          \u03b81 = \u03b82;
-        } else if (isNaN(\u03b82) && !isNaN(\u03b81)) {
-          \u03b82 = \u03b81;
-        }
+        var _ref61 = [ get(color1, hue), get(color2, hue) ], \u03b81 = _ref61[0], \u03b82 = _ref61[1];
         var _adjust = adjust(arc, [ \u03b81, \u03b82 ]);
         var _adjust2 = _slicedToArray(_adjust, 2);
         \u03b81 = _adjust2[0];
@@ -16575,11 +16723,11 @@
     }
     var interpolation = Object.freeze({
       __proto__: null,
-      isRange: isRange,
       mix: mix,
+      steps: steps,
       range: range,
-      register: register,
-      steps: steps
+      isRange: isRange,
+      register: register
     });
     var HSL = new ColorSpace({
       id: 'hsl',
@@ -16622,13 +16770,6 @@
           }
           h = h * 60;
         }
-        if (s < 0) {
-          h += 180;
-          s = Math.abs(s);
-        }
-        if (h >= 360) {
-          h -= 360;
-        }
         return [ h, s * 100, l * 100 ];
       },
       toBase: function toBase(hsl) {
@@ -16648,6 +16789,7 @@
       },
       formats: {
         hsl: {
+          toGamut: true,
           coords: [ ' | ', '', '' ]
         },
         hsla: {
@@ -16692,8 +16834,7 @@
       },
       formats: {
         color: {
-          id: '--hsv',
-          coords: [ ' | ', ' | ', ' | ' ]
+          toGamut: true
         }
       }
     });
@@ -16735,7 +16876,8 @@
       },
       formats: {
         hwb: {
-          coords: [ ' | ', ' | ', ' | ' ]
+          toGamut: true,
+          coords: [ ' | ', '', '' ]
         }
       }
     });
@@ -16743,7 +16885,6 @@
     var fromXYZ_M$2 = [ [ 2.0415879038107465, -.5650069742788596, -.34473135077832956 ], [ -.9692436362808795, 1.8759675015077202, .04155505740717557 ], [ .013444280632031142, -.11836239223101838, 1.0151749943912054 ] ];
     var A98Linear = new RGBColorSpace({
       id: 'a98rgb-linear',
-      cssId: '--a98-rgb-linear',
       name: 'Linear Adobe\xae 98 RGB compatible',
       white: 'D65',
       toXYZ_M: toXYZ_M$2,
@@ -16751,7 +16892,6 @@
     });
     var a98rgb = new RGBColorSpace({
       id: 'a98rgb',
-      cssId: 'a98-rgb',
       name: 'Adobe\xae 98 RGB compatible',
       base: A98Linear,
       toBase: function toBase(RGB) {
@@ -16763,13 +16903,17 @@
         return RGB.map(function(val) {
           return Math.pow(Math.abs(val), 256 / 563) * Math.sign(val);
         });
+      },
+      formats: {
+        color: {
+          id: 'a98-rgb'
+        }
       }
     });
-    var toXYZ_M$1 = [ [ .7977666449006423, .13518129740053308, .0313477341283922 ], [ .2880748288194013, .711835234241873, 8993693872564e-17 ], [ 0, 0, .8251046025104602 ] ];
-    var fromXYZ_M$1 = [ [ 1.3457868816471583, -.25557208737979464, -.05110186497554526 ], [ -.5446307051249019, 1.5082477428451468, .02052744743642139 ], [ 0, 0, 1.2119675456389452 ] ];
+    var toXYZ_M$1 = [ [ .7977604896723027, .13518583717574031, .0313493495815248 ], [ .2880711282292934, .7118432178101014, 8565396060525902e-20 ], [ 0, 0, .8251046025104601 ] ];
+    var fromXYZ_M$1 = [ [ 1.3457989731028281, -.25558010007997534, -.05110628506753401 ], [ -.5446224939028347, 1.5082327413132781, .02053603239147973 ], [ 0, 0, 1.2119675456389454 ] ];
     var ProPhotoLinear = new RGBColorSpace({
       id: 'prophoto-linear',
-      cssId: '--prophoto-rgb-linear',
       name: 'Linear ProPhoto',
       white: 'D50',
       base: XYZ_D50,
@@ -16780,7 +16924,6 @@
     var Et2 = 16 / 512;
     var prophoto = new RGBColorSpace({
       id: 'prophoto',
-      cssId: 'prophoto-rgb',
       name: 'ProPhoto',
       base: ProPhotoLinear,
       toBase: function toBase(RGB) {
@@ -16792,11 +16935,16 @@
         return RGB.map(function(v) {
           return v >= Et ? Math.pow(v, 1 / 1.8) : 16 * v;
         });
+      },
+      formats: {
+        color: {
+          id: 'prophoto-rgb'
+        }
       }
     });
     var oklch = new ColorSpace({
       id: 'oklch',
-      name: 'Oklch',
+      name: 'OKLCh',
       coords: {
         l: {
           refRange: [ 0, 1 ],
@@ -16839,301 +16987,7 @@
       },
       formats: {
         oklch: {
-          coords: [ ' | ', ' | [0,1]', ' | ' ]
-        }
-      }
-    });
-    var white = WHITES.D65;
-    var \u03b5$2 = 216 / 24389;
-    var \u03ba$1 = 24389 / 27;
-    var _uv = uv({
-      space: xyz_d65,
-      coords: white
-    }), _uv2 = _slicedToArray(_uv, 2), U_PRIME_WHITE = _uv2[0], V_PRIME_WHITE = _uv2[1];
-    var Luv = new ColorSpace({
-      id: 'luv',
-      name: 'Luv',
-      coords: {
-        l: {
-          refRange: [ 0, 100 ],
-          name: 'Lightness'
-        },
-        u: {
-          refRange: [ -215, 215 ]
-        },
-        v: {
-          refRange: [ -215, 215 ]
-        }
-      },
-      white: white,
-      base: xyz_d65,
-      fromBase: function fromBase(XYZ) {
-        var xyz = [ skipNone(XYZ[0]), skipNone(XYZ[1]), skipNone(XYZ[2]) ];
-        var y = xyz[1];
-        var _uv3 = uv({
-          space: xyz_d65,
-          coords: xyz
-        }), _uv4 = _slicedToArray(_uv3, 2), up = _uv4[0], vp = _uv4[1];
-        if (!Number.isFinite(up) || !Number.isFinite(vp)) {
-          return [ 0, 0, 0 ];
-        }
-        var L = y <= \u03b5$2 ? \u03ba$1 * y : 116 * Math.cbrt(y) - 16;
-        return [ L, 13 * L * (up - U_PRIME_WHITE), 13 * L * (vp - V_PRIME_WHITE) ];
-      },
-      toBase: function toBase(Luv2) {
-        var _Luv = _slicedToArray(Luv2, 3), L = _Luv[0], u = _Luv[1], v = _Luv[2];
-        if (L === 0 || isNone(L)) {
-          return [ 0, 0, 0 ];
-        }
-        u = skipNone(u);
-        v = skipNone(v);
-        var up = u / (13 * L) + U_PRIME_WHITE;
-        var vp = v / (13 * L) + V_PRIME_WHITE;
-        var y = L <= 8 ? L / \u03ba$1 : Math.pow((L + 16) / 116, 3);
-        return [ y * (9 * up / (4 * vp)), y, y * ((12 - 3 * up - 20 * vp) / (4 * vp)) ];
-      },
-      formats: {
-        color: {
-          id: '--luv',
-          coords: [ ' | ', ' | [-1,1]', ' | [-1,1]' ]
-        }
-      }
-    });
-    var LCHuv = new ColorSpace({
-      id: 'lchuv',
-      name: 'LChuv',
-      coords: {
-        l: {
-          refRange: [ 0, 100 ],
-          name: 'Lightness'
-        },
-        c: {
-          refRange: [ 0, 220 ],
-          name: 'Chroma'
-        },
-        h: {
-          refRange: [ 0, 360 ],
-          type: 'angle',
-          name: 'Hue'
-        }
-      },
-      base: Luv,
-      fromBase: function fromBase(Luv2) {
-        var _Luv2 = _slicedToArray(Luv2, 3), L = _Luv2[0], u = _Luv2[1], v = _Luv2[2];
-        var hue;
-        var \u03b52 = .02;
-        if (Math.abs(u) < \u03b52 && Math.abs(v) < \u03b52) {
-          hue = NaN;
-        } else {
-          hue = Math.atan2(v, u) * 180 / Math.PI;
-        }
-        return [ L, Math.sqrt(Math.pow(u, 2) + Math.pow(v, 2)), constrain(hue) ];
-      },
-      toBase: function toBase(LCH) {
-        var _LCH2 = _slicedToArray(LCH, 3), Lightness = _LCH2[0], Chroma = _LCH2[1], Hue = _LCH2[2];
-        if (Chroma < 0) {
-          Chroma = 0;
-        }
-        if (isNaN(Hue)) {
-          Hue = 0;
-        }
-        return [ Lightness, Chroma * Math.cos(Hue * Math.PI / 180), Chroma * Math.sin(Hue * Math.PI / 180) ];
-      },
-      formats: {
-        color: {
-          id: '--lchuv',
-          coords: [ ' | ', ' | ', ' | ' ]
-        }
-      }
-    });
-    var \u03b5$1 = 216 / 24389;
-    var \u03ba = 24389 / 27;
-    var m_r0 = fromXYZ_M$3[0][0];
-    var m_r1 = fromXYZ_M$3[0][1];
-    var m_r2 = fromXYZ_M$3[0][2];
-    var m_g0 = fromXYZ_M$3[1][0];
-    var m_g1 = fromXYZ_M$3[1][1];
-    var m_g2 = fromXYZ_M$3[1][2];
-    var m_b0 = fromXYZ_M$3[2][0];
-    var m_b1 = fromXYZ_M$3[2][1];
-    var m_b2 = fromXYZ_M$3[2][2];
-    function distanceFromOriginAngle(slope, intercept, angle) {
-      var d2 = intercept / (Math.sin(angle) - slope * Math.cos(angle));
-      return d2 < 0 ? Infinity : d2;
-    }
-    function calculateBoundingLines(l) {
-      var sub1 = Math.pow(l + 16, 3) / 1560896;
-      var sub2 = sub1 > \u03b5$1 ? sub1 : l / \u03ba;
-      var s1r = sub2 * (284517 * m_r0 - 94839 * m_r2);
-      var s2r = sub2 * (838422 * m_r2 + 769860 * m_r1 + 731718 * m_r0);
-      var s3r = sub2 * (632260 * m_r2 - 126452 * m_r1);
-      var s1g = sub2 * (284517 * m_g0 - 94839 * m_g2);
-      var s2g = sub2 * (838422 * m_g2 + 769860 * m_g1 + 731718 * m_g0);
-      var s3g = sub2 * (632260 * m_g2 - 126452 * m_g1);
-      var s1b = sub2 * (284517 * m_b0 - 94839 * m_b2);
-      var s2b = sub2 * (838422 * m_b2 + 769860 * m_b1 + 731718 * m_b0);
-      var s3b = sub2 * (632260 * m_b2 - 126452 * m_b1);
-      return {
-        r0s: s1r / s3r,
-        r0i: s2r * l / s3r,
-        r1s: s1r / (s3r + 126452),
-        r1i: (s2r - 769860) * l / (s3r + 126452),
-        g0s: s1g / s3g,
-        g0i: s2g * l / s3g,
-        g1s: s1g / (s3g + 126452),
-        g1i: (s2g - 769860) * l / (s3g + 126452),
-        b0s: s1b / s3b,
-        b0i: s2b * l / s3b,
-        b1s: s1b / (s3b + 126452),
-        b1i: (s2b - 769860) * l / (s3b + 126452)
-      };
-    }
-    function calcMaxChromaHsluv(lines, h) {
-      var hueRad = h / 360 * Math.PI * 2;
-      var r0 = distanceFromOriginAngle(lines.r0s, lines.r0i, hueRad);
-      var r1 = distanceFromOriginAngle(lines.r1s, lines.r1i, hueRad);
-      var g0 = distanceFromOriginAngle(lines.g0s, lines.g0i, hueRad);
-      var g1 = distanceFromOriginAngle(lines.g1s, lines.g1i, hueRad);
-      var b0 = distanceFromOriginAngle(lines.b0s, lines.b0i, hueRad);
-      var b1 = distanceFromOriginAngle(lines.b1s, lines.b1i, hueRad);
-      return Math.min(r0, r1, g0, g1, b0, b1);
-    }
-    var hsluv = new ColorSpace({
-      id: 'hsluv',
-      name: 'HSLuv',
-      coords: {
-        h: {
-          refRange: [ 0, 360 ],
-          type: 'angle',
-          name: 'Hue'
-        },
-        s: {
-          range: [ 0, 100 ],
-          name: 'Saturation'
-        },
-        l: {
-          range: [ 0, 100 ],
-          name: 'Lightness'
-        }
-      },
-      base: LCHuv,
-      gamutSpace: sRGB,
-      fromBase: function fromBase(lch2) {
-        var _ref68 = [ skipNone(lch2[0]), skipNone(lch2[1]), skipNone(lch2[2]) ], l = _ref68[0], c4 = _ref68[1], h = _ref68[2];
-        var s;
-        if (l > 99.9999999) {
-          s = 0;
-          l = 100;
-        } else if (l < 1e-8) {
-          s = 0;
-          l = 0;
-        } else {
-          var lines = calculateBoundingLines(l);
-          var max2 = calcMaxChromaHsluv(lines, h);
-          s = c4 / max2 * 100;
-        }
-        return [ h, s, l ];
-      },
-      toBase: function toBase(hsl) {
-        var _ref69 = [ skipNone(hsl[0]), skipNone(hsl[1]), skipNone(hsl[2]) ], h = _ref69[0], s = _ref69[1], l = _ref69[2];
-        var c4;
-        if (l > 99.9999999) {
-          l = 100;
-          c4 = 0;
-        } else if (l < 1e-8) {
-          l = 0;
-          c4 = 0;
-        } else {
-          var lines = calculateBoundingLines(l);
-          var max2 = calcMaxChromaHsluv(lines, h);
-          c4 = max2 / 100 * s;
-        }
-        return [ l, c4, h ];
-      },
-      formats: {
-        color: {
-          id: '--hsluv',
-          coords: [ ' | ', ' | ', ' | ' ]
-        }
-      }
-    });
-    fromXYZ_M$3[0][0];
-    fromXYZ_M$3[0][1];
-    fromXYZ_M$3[0][2];
-    fromXYZ_M$3[1][0];
-    fromXYZ_M$3[1][1];
-    fromXYZ_M$3[1][2];
-    fromXYZ_M$3[2][0];
-    fromXYZ_M$3[2][1];
-    fromXYZ_M$3[2][2];
-    function distanceFromOrigin(slope, intercept) {
-      return Math.abs(intercept) / Math.sqrt(Math.pow(slope, 2) + 1);
-    }
-    function calcMaxChromaHpluv(lines) {
-      var r0 = distanceFromOrigin(lines.r0s, lines.r0i);
-      var r1 = distanceFromOrigin(lines.r1s, lines.r1i);
-      var g0 = distanceFromOrigin(lines.g0s, lines.g0i);
-      var g1 = distanceFromOrigin(lines.g1s, lines.g1i);
-      var b0 = distanceFromOrigin(lines.b0s, lines.b0i);
-      var b1 = distanceFromOrigin(lines.b1s, lines.b1i);
-      return Math.min(r0, r1, g0, g1, b0, b1);
-    }
-    var hpluv = new ColorSpace({
-      id: 'hpluv',
-      name: 'HPLuv',
-      coords: {
-        h: {
-          refRange: [ 0, 360 ],
-          type: 'angle',
-          name: 'Hue'
-        },
-        s: {
-          range: [ 0, 100 ],
-          name: 'Saturation'
-        },
-        l: {
-          range: [ 0, 100 ],
-          name: 'Lightness'
-        }
-      },
-      base: LCHuv,
-      gamutSpace: 'self',
-      fromBase: function fromBase(lch2) {
-        var _ref70 = [ skipNone(lch2[0]), skipNone(lch2[1]), skipNone(lch2[2]) ], l = _ref70[0], c4 = _ref70[1], h = _ref70[2];
-        var s;
-        if (l > 99.9999999) {
-          s = 0;
-          l = 100;
-        } else if (l < 1e-8) {
-          s = 0;
-          l = 0;
-        } else {
-          var lines = calculateBoundingLines(l);
-          var max2 = calcMaxChromaHpluv(lines);
-          s = c4 / max2 * 100;
-        }
-        return [ h, s, l ];
-      },
-      toBase: function toBase(hsl) {
-        var _ref71 = [ skipNone(hsl[0]), skipNone(hsl[1]), skipNone(hsl[2]) ], h = _ref71[0], s = _ref71[1], l = _ref71[2];
-        var c4;
-        if (l > 99.9999999) {
-          l = 100;
-          c4 = 0;
-        } else if (l < 1e-8) {
-          l = 0;
-          c4 = 0;
-        } else {
-          var lines = calculateBoundingLines(l);
-          var max2 = calcMaxChromaHpluv(lines);
-          c4 = max2 / 100 * s;
-        }
-        return [ l, c4, h ];
-      },
-      formats: {
-        color: {
-          id: '--hpluv',
-          coords: [ ' | ', ' | ', ' | ' ]
+          coords: [ ' | ', '', ' | ' ]
         }
       }
     });
@@ -17147,7 +17001,6 @@
     var c3 = 2392 / Math.pow(2, 7);
     var rec2100Pq = new RGBColorSpace({
       id: 'rec2100pq',
-      cssId: 'rec2100-pq',
       name: 'REC.2100-PQ',
       base: REC2020Linear,
       toBase: function toBase(RGB) {
@@ -17163,6 +17016,11 @@
           var denom = 1 + c3 * Math.pow(x, n);
           return Math.pow(num / denom, m);
         });
+      },
+      formats: {
+        color: {
+          id: 'rec2100-pq'
+        }
       }
     });
     var a = .17883277;
@@ -17171,7 +17029,7 @@
     var scale = 3.7743;
     var rec2100Hlg = new RGBColorSpace({
       id: 'rec2100hlg',
-      cssId: 'rec2100-hlg',
+      cssid: 'rec2100-hlg',
       name: 'REC.2100-HLG',
       referred: 'scene',
       base: REC2020Linear,
@@ -17180,7 +17038,7 @@
           if (val <= .5) {
             return Math.pow(val, 2) / 3 * scale;
           }
-          return (Math.exp((val - c) / a) + b) / 12 * scale;
+          return Math.exp((val - c) / a + b) / 12 * scale;
         });
       },
       fromBase: function fromBase(RGB) {
@@ -17191,6 +17049,11 @@
           }
           return a * Math.log(12 * val - b) + c;
         });
+      },
+      formats: {
+        color: {
+          id: 'rec2100-hlg'
+        }
       }
     });
     var CATs = {};
@@ -17204,8 +17067,8 @@
         env.M = adapt(env.W1, env.W2, env.options.method);
       }
     });
-    function defineCAT(_ref72) {
-      var id = _ref72.id, toCone_M = _ref72.toCone_M, fromCone_M = _ref72.fromCone_M;
+    function defineCAT(_ref62) {
+      var id = _ref62.id, toCone_M = _ref62.toCone_M, fromCone_M = _ref62.fromCone_M;
       CATs[id] = arguments[0];
     }
     function adapt(W1, W2) {
@@ -17221,22 +17084,22 @@
     defineCAT({
       id: 'von Kries',
       toCone_M: [ [ .40024, .7076, -.08081 ], [ -.2263, 1.16532, .0457 ], [ 0, 0, .91822 ] ],
-      fromCone_M: [ [ 1.8599363874558397, -1.1293816185800916, .21989740959619328 ], [ .3611914362417676, .6388124632850422, -6370596838649899e-21 ], [ 0, 0, 1.0890636230968613 ] ]
+      fromCone_M: [ [ 1.8599364, -1.1293816, .2198974 ], [ .3611914, .6388125, -64e-7 ], [ 0, 0, 1.0890636 ] ]
     });
     defineCAT({
       id: 'Bradford',
       toCone_M: [ [ .8951, .2664, -.1614 ], [ -.7502, 1.7135, .0367 ], [ .0389, -.0685, 1.0296 ] ],
-      fromCone_M: [ [ .9869929054667121, -.14705425642099013, .15996265166373122 ], [ .4323052697233945, .5183602715367774, .049291228212855594 ], [ -.00852866457517732, .04004282165408486, .96848669578755 ] ]
+      fromCone_M: [ [ .9869929, -.1470543, .1599627 ], [ .4323053, .5183603, .0492912 ], [ -.0085287, .0400428, .9684867 ] ]
     });
     defineCAT({
       id: 'CAT02',
       toCone_M: [ [ .7328, .4296, -.1624 ], [ -.7036, 1.6975, .0061 ], [ .003, .0136, .9834 ] ],
-      fromCone_M: [ [ 1.0961238208355142, -.27886900021828726, .18274517938277307 ], [ .4543690419753592, .4735331543074117, .07209780371722911 ], [ -.009627608738429355, -.00569803121611342, 1.0153256399545427 ] ]
+      fromCone_M: [ [ 1.0961238, -.278869, .1827452 ], [ .454369, .4735332, .0720978 ], [ -.0096276, -.005698, 1.0153256 ] ]
     });
     defineCAT({
       id: 'CAT16',
       toCone_M: [ [ .401288, .650173, -.051461 ], [ -.250268, 1.204414, .045854 ], [ -.002079, .048952, .953127 ] ],
-      fromCone_M: [ [ 1.862067855087233, -1.0112546305316845, .14918677544445172 ], [ .3875265432361372, .6214474419314753, -.008973985167612521 ], [ -.01584149884933386, -.03412293802851557, 1.0499644368778496 ] ]
+      fromCone_M: [ [ 1.862067855087233, -1.011254630531685, .1491867754444518 ], [ .3875265432361372, .6214474419314753, -.008973985167612518 ], [ -.01584149884933386, -.03412293802851557, 1.04996443687785 ] ]
     });
     Object.assign(WHITES, {
       A: [ 1.0985, 1, .35585 ],
@@ -17253,7 +17116,6 @@
     var fromXYZ_M = [ [ 1.6410233796943257, -.32480329418479, -.23642469523761225 ], [ -.6636628587229829, 1.6153315916573379, .016756347685530137 ], [ .011721894328375376, -.008284441996237409, .9883948585390215 ] ];
     var ACEScg = new RGBColorSpace({
       id: 'acescg',
-      cssId: '--acescg',
       name: 'ACEScg',
       coords: {
         r: {
@@ -17272,14 +17134,16 @@
       referred: 'scene',
       white: WHITES.ACES,
       toXYZ_M: toXYZ_M,
-      fromXYZ_M: fromXYZ_M
+      fromXYZ_M: fromXYZ_M,
+      formats: {
+        color: {}
+      }
     });
     var \u03b5 = Math.pow(2, -16);
     var ACES_min_nonzero = -.35828683;
     var ACES_cc_max = (Math.log2(65504) + 9.72) / 17.52;
     var acescc = new RGBColorSpace({
       id: 'acescc',
-      cssId: '--acescc',
       name: 'ACEScc',
       coords: {
         r: {
@@ -17319,49 +17183,47 @@
             return (Math.log2(val) + 9.72) / 17.52;
           }
         });
+      },
+      formats: {
+        color: {}
       }
     });
     var spaces = Object.freeze({
       __proto__: null,
-      A98RGB: a98rgb,
-      A98RGB_Linear: A98Linear,
-      ACEScc: acescc,
-      ACEScg: ACEScg,
-      CAM16_JMh: cam16,
-      HCT: hct,
-      HPLuv: hpluv,
+      XYZ_D65: XYZ_D65,
+      XYZ_D50: XYZ_D50,
+      XYZ_ABS_D65: XYZ_Abs_D65,
+      Lab_D65: lab_d65,
+      Lab: lab,
+      LCH: lch,
+      sRGB_Linear: sRGBLinear,
+      sRGB: sRGB,
       HSL: HSL,
-      HSLuv: hsluv,
-      HSV: HSV,
       HWB: hwb,
-      ICTCP: ictcp,
-      JzCzHz: jzczhz,
-      Jzazbz: Jzazbz,
-      LCH: lch,
-      LCHuv: LCHuv,
-      Lab: lab,
-      Lab_D65: lab_d65,
-      Luv: Luv,
-      OKLCH: oklch,
-      OKLab: OKLab,
-      P3: P3,
+      HSV: HSV,
       P3_Linear: P3Linear,
-      ProPhoto: prophoto,
+      P3: P3,
+      A98RGB_Linear: A98Linear,
+      A98RGB: a98rgb,
       ProPhoto_Linear: ProPhotoLinear,
-      REC_2020: REC2020,
+      ProPhoto: prophoto,
       REC_2020_Linear: REC2020Linear,
-      REC_2100_HLG: rec2100Hlg,
+      REC_2020: REC2020,
+      OKLab: OKLab,
+      OKLCH: oklch,
+      Jzazbz: Jzazbz,
+      JzCzHz: jzczhz,
+      ICTCP: ictcp,
       REC_2100_PQ: rec2100Pq,
-      XYZ_ABS_D65: XYZ_Abs_D65,
-      XYZ_D50: XYZ_D50,
-      XYZ_D65: xyz_d65,
-      sRGB: sRGB,
-      sRGB_Linear: sRGBLinear
+      REC_2100_HLG: rec2100Hlg,
+      ACEScg: ACEScg,
+      ACEScc: acescc
     });
-    var Color = function() {
+    var Color = (_space = new WeakMap(), function() {
       function Color() {
         var _this2 = this;
         _classCallCheck(this, Color);
+        _classPrivateFieldInitSpec(this, _space, void 0);
         var color;
         for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
           args[_key3] = arguments[_key3];
@@ -17379,20 +17241,15 @@
           coords = args[1];
           alpha = args[2];
         }
-        Object.defineProperty(this, 'space', {
-          value: ColorSpace.get(space),
-          writable: false,
-          enumerable: true,
-          configurable: true
-        });
+        _classPrivateFieldSet(_space, this, ColorSpace.get(space));
         this.coords = coords ? coords.slice() : [ 0, 0, 0 ];
-        this.alpha = alpha > 1 || alpha === void 0 ? 1 : alpha < 0 ? 0 : alpha;
+        this.alpha = alpha < 1 ? alpha : 1;
         for (var _i17 = 0; _i17 < this.coords.length; _i17++) {
           if (this.coords[_i17] === 'NaN') {
             this.coords[_i17] = NaN;
           }
         }
-        var _loop5 = function _loop5(id) {
+        var _loop7 = function _loop7(id) {
           Object.defineProperty(_this2, id, {
             get: function get() {
               return _this2.get(id);
@@ -17402,14 +17259,19 @@
             }
           });
         };
-        for (var id in this.space.coords) {
-          _loop5(id);
+        for (var id in _classPrivateFieldGet(_space, this).coords) {
+          _loop7(id);
         }
       }
-      _createClass(Color, [ {
+      return _createClass(Color, [ {
+        key: 'space',
+        get: function get() {
+          return _classPrivateFieldGet(_space, this);
+        }
+      }, {
         key: 'spaceId',
         get: function get() {
-          return this.space.id;
+          return _classPrivateFieldGet(_space, this).id;
         }
       }, {
         key: 'clone',
@@ -17500,8 +17362,7 @@
           }
         }
       } ]);
-      return Color;
-    }();
+    }());
     Color.defineFunctions({
       get: get,
       getAll: getAll,
@@ -17538,6 +17399,10 @@
       });
     });
     function addSpaceAccessors(id, space) {
+      Object.keys(space.coords);
+      Object.values(space.coords).map(function(c4) {
+        return c4.name;
+      });
       var propId = id.replace(/-/g, '_');
       Object.defineProperty(Color.prototype, propId, {
         get: function get() {
@@ -17587,9 +17452,6 @@
     Color.extend({
       deltaE: deltaE
     });
-    Object.assign(Color, {
-      deltaEMethods: deltaEMethods
-    });
     Color.extend(variations);
     Color.extend({
       contrast: contrast
@@ -17598,38 +17460,23 @@
     Color.extend(luminance);
     Color.extend(interpolation);
     Color.extend(contrastMethods);
+    var import_from2 = __toModule(require_from4());
     import_dot['default'].templateSettings.strip = false;
     var hexRegex = /^#[0-9a-f]{3,8}$/i;
+    var hslRegex = /hsl\(\s*([-\d.]+)(rad|turn)/;
     var Color2 = (_r = new WeakMap(), _g = new WeakMap(), _b = new WeakMap(), _red = new WeakMap(), 
-    _green = new WeakMap(), _blue = new WeakMap(), _add = new WeakSet(), function() {
+    _green = new WeakMap(), _blue = new WeakMap(), _Class3_brand = new WeakSet(), 
+    function() {
       function Color2(red, green, blue) {
         var alpha = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 1;
         _classCallCheck(this, Color2);
-        _classPrivateMethodInitSpec(this, _add);
-        _classPrivateFieldInitSpec(this, _r, {
-          writable: true,
-          value: void 0
-        });
-        _classPrivateFieldInitSpec(this, _g, {
-          writable: true,
-          value: void 0
-        });
-        _classPrivateFieldInitSpec(this, _b, {
-          writable: true,
-          value: void 0
-        });
-        _classPrivateFieldInitSpec(this, _red, {
-          writable: true,
-          value: void 0
-        });
-        _classPrivateFieldInitSpec(this, _green, {
-          writable: true,
-          value: void 0
-        });
-        _classPrivateFieldInitSpec(this, _blue, {
-          writable: true,
-          value: void 0
-        });
+        _classPrivateMethodInitSpec(this, _Class3_brand);
+        _classPrivateFieldInitSpec(this, _r, void 0);
+        _classPrivateFieldInitSpec(this, _g, void 0);
+        _classPrivateFieldInitSpec(this, _b, void 0);
+        _classPrivateFieldInitSpec(this, _red, void 0);
+        _classPrivateFieldInitSpec(this, _green, void 0);
+        _classPrivateFieldInitSpec(this, _blue, void 0);
         if (red instanceof Color2) {
           var r = red.r, g2 = red.g, b2 = red.b;
           this.r = r;
@@ -17643,59 +17490,59 @@
         this.blue = blue;
         this.alpha = alpha;
       }
-      _createClass(Color2, [ {
+      return _createClass(Color2, [ {
         key: 'r',
         get: function get() {
-          return _classPrivateFieldGet(this, _r);
+          return _classPrivateFieldGet(_r, this);
         },
         set: function set(value) {
-          _classPrivateFieldSet(this, _r, value);
-          _classPrivateFieldSet(this, _red, Math.round(clamp2(value, 0, 1) * 255));
+          _classPrivateFieldSet(_r, this, value);
+          _classPrivateFieldSet(_red, this, Math.round(clamp(value, 0, 1) * 255));
         }
       }, {
         key: 'g',
         get: function get() {
-          return _classPrivateFieldGet(this, _g);
+          return _classPrivateFieldGet(_g, this);
         },
         set: function set(value) {
-          _classPrivateFieldSet(this, _g, value);
-          _classPrivateFieldSet(this, _green, Math.round(clamp2(value, 0, 1) * 255));
+          _classPrivateFieldSet(_g, this, value);
+          _classPrivateFieldSet(_green, this, Math.round(clamp(value, 0, 1) * 255));
         }
       }, {
         key: 'b',
         get: function get() {
-          return _classPrivateFieldGet(this, _b);
+          return _classPrivateFieldGet(_b, this);
         },
         set: function set(value) {
-          _classPrivateFieldSet(this, _b, value);
-          _classPrivateFieldSet(this, _blue, Math.round(clamp2(value, 0, 1) * 255));
+          _classPrivateFieldSet(_b, this, value);
+          _classPrivateFieldSet(_blue, this, Math.round(clamp(value, 0, 1) * 255));
         }
       }, {
         key: 'red',
         get: function get() {
-          return _classPrivateFieldGet(this, _red);
+          return _classPrivateFieldGet(_red, this);
         },
         set: function set(value) {
-          _classPrivateFieldSet(this, _r, value / 255);
-          _classPrivateFieldSet(this, _red, clamp2(value, 0, 255));
+          _classPrivateFieldSet(_r, this, value / 255);
+          _classPrivateFieldSet(_red, this, clamp(value, 0, 255));
         }
       }, {
         key: 'green',
         get: function get() {
-          return _classPrivateFieldGet(this, _green);
+          return _classPrivateFieldGet(_green, this);
         },
         set: function set(value) {
-          _classPrivateFieldSet(this, _g, value / 255);
-          _classPrivateFieldSet(this, _green, clamp2(value, 0, 255));
+          _classPrivateFieldSet(_g, this, value / 255);
+          _classPrivateFieldSet(_green, this, clamp(value, 0, 255));
         }
       }, {
         key: 'blue',
         get: function get() {
-          return _classPrivateFieldGet(this, _blue);
+          return _classPrivateFieldGet(_blue, this);
         },
         set: function set(value) {
-          _classPrivateFieldSet(this, _b, value / 255);
-          _classPrivateFieldSet(this, _blue, clamp2(value, 0, 255));
+          _classPrivateFieldSet(_b, this, value / 255);
+          _classPrivateFieldSet(_blue, this, clamp(value, 0, 255));
         }
       }, {
         key: 'toHexString',
@@ -17719,12 +17566,31 @@
       }, {
         key: 'parseString',
         value: function parseString(colorString) {
+          colorString = colorString.replace(hslRegex, function(match, angle, unit) {
+            var value = angle + unit;
+            switch (unit) {
+             case 'rad':
+              return match.replace(value, radToDeg(angle));
+
+             case 'turn':
+              return match.replace(value, turnToDeg(angle));
+            }
+          });
           try {
-            var _color3 = new Color(colorString).to('srgb');
-            this.r = _color3.r;
-            this.g = _color3.g;
-            this.b = _color3.b;
-            this.alpha = +_color3.alpha;
+            var prototypeArrayFrom;
+            if ('Prototype' in window && 'Version' in window.Prototype) {
+              prototypeArrayFrom = Array.from;
+              Array.from = import_from2['default'];
+            }
+            var _color2 = new Color(colorString).to('srgb');
+            if (prototypeArrayFrom) {
+              Array.from = prototypeArrayFrom;
+              prototypeArrayFrom = null;
+            }
+            this.r = _color2.r;
+            this.g = _color2.g;
+            this.b = _color2.b;
+            this.alpha = +_color2.alpha;
           } catch (err2) {
             throw new Error('Unable to parse color "'.concat(colorString, '"'));
           }
@@ -17766,7 +17632,7 @@
         key: 'setLuminosity',
         value: function setLuminosity(L) {
           var d2 = L - this.getLuminosity();
-          return _classPrivateMethodGet(this, _add, _add2).call(this, d2).clip();
+          return _assertClassBrand(_Class3_brand, this, _add).call(this, d2).clip();
         }
       }, {
         key: 'getSaturation',
@@ -17822,9 +17688,8 @@
           return C;
         }
       } ]);
-      return Color2;
     }());
-    function _add2(value) {
+    function _add(value) {
       var C = new Color2(this);
       C.r += value;
       C.g += value;
@@ -17832,9 +17697,15 @@
       return C;
     }
     var color_default = Color2;
-    function clamp2(value, min, max2) {
+    function clamp(value, min, max2) {
       return Math.min(Math.max(min, value), max2);
     }
+    function radToDeg(rad) {
+      return rad * 180 / Math.PI;
+    }
+    function turnToDeg(turn) {
+      return turn * 360;
+    }
     function getOwnBackgroundColor(elmStyle) {
       var bgColor = new color_default();
       bgColor.parseString(elmStyle.getPropertyValue('background-color'));
@@ -17914,8 +17785,8 @@
       if (!refs || !refs.length) {
         return false;
       }
-      return refs.some(function(_ref73) {
-        var actualNode = _ref73.actualNode;
+      return refs.some(function(_ref63) {
+        var actualNode = _ref63.actualNode;
         return isVisible(actualNode, screenReader, recursed);
       });
     }
@@ -17927,7 +17798,7 @@
       var vNode = el instanceof abstract_virtual_node_default ? el : get_node_from_tree_default(el);
       el = vNode ? vNode.actualNode : el;
       var cacheName = '_isVisible' + (screenReader ? 'ScreenReader' : '');
-      var _ref74 = (_window$Node2 = window.Node) !== null && _window$Node2 !== void 0 ? _window$Node2 : {}, DOCUMENT_NODE = _ref74.DOCUMENT_NODE, DOCUMENT_FRAGMENT_NODE = _ref74.DOCUMENT_FRAGMENT_NODE;
+      var _ref64 = (_window$Node2 = window.Node) !== null && _window$Node2 !== void 0 ? _window$Node2 : {}, DOCUMENT_NODE = _ref64.DOCUMENT_NODE, DOCUMENT_FRAGMENT_NODE = _ref64.DOCUMENT_FRAGMENT_NODE;
       var nodeType = vNode ? vNode.props.nodeType : el.nodeType;
       var nodeName2 = vNode ? vNode.props.nodeName : el.nodeName.toLowerCase();
       if (vNode && typeof vNode[cacheName] !== 'undefined') {
@@ -18173,7 +18044,6 @@
     var visually_overlaps_default = visuallyOverlaps;
     var nodeIndex2 = 0;
     var VirtualNode = function(_abstract_virtual_nod) {
-      _inherits(VirtualNode, _abstract_virtual_nod);
       function VirtualNode(node, parent, shadowId) {
         var _this4;
         _classCallCheck(this, VirtualNode);
@@ -18198,11 +18068,12 @@
           _this4._type = type2;
         }
         if (cache_default.get('nodeMap')) {
-          cache_default.get('nodeMap').set(node, _assertThisInitialized(_this4));
+          cache_default.get('nodeMap').set(node, _this4);
         }
         return _this4;
       }
-      _createClass(VirtualNode, [ {
+      _inherits(VirtualNode, _abstract_virtual_nod);
+      return _createClass(VirtualNode, [ {
         key: 'props',
         get: function get() {
           if (!this._cache.hasOwnProperty('props')) {
@@ -18303,7 +18174,6 @@
           return this._cache.boundingClientRect;
         }
       } ]);
-      return VirtualNode;
     }(abstract_virtual_node_default);
     var virtual_node_default = VirtualNode;
     function tokenList(str) {
@@ -18735,7 +18605,7 @@
         return {};
       }
       var navigator2 = win.navigator, innerHeight = win.innerHeight, innerWidth = win.innerWidth;
-      var _ref75 = getOrientation(win) || {}, angle = _ref75.angle, type2 = _ref75.type;
+      var _ref65 = getOrientation(win) || {}, angle = _ref65.angle, type2 = _ref65.type;
       return {
         userAgent: navigator2.userAgent,
         windowWidth: innerWidth,
@@ -18744,12 +18614,12 @@
         orientationType: type2
       };
     }
-    function getOrientation(_ref76) {
-      var screen = _ref76.screen;
+    function getOrientation(_ref66) {
+      var screen = _ref66.screen;
       return screen.orientation || screen.msOrientation || screen.mozOrientation;
     }
-    function createFrameContext(frame, _ref77) {
-      var focusable = _ref77.focusable, page = _ref77.page;
+    function createFrameContext(frame, _ref67) {
+      var focusable = _ref67.focusable, page = _ref67.page;
       return {
         node: frame,
         include: [],
@@ -18988,8 +18858,8 @@
       }
       context.frames.push(createFrameContext(frame, context));
     }
-    function isPageContext(_ref78) {
-      var include = _ref78.include;
+    function isPageContext(_ref68) {
+      var include = _ref68.include;
       return include.length === 1 && include[0].actualNode === document.documentElement;
     }
     function validateContext(context) {
@@ -18998,8 +18868,8 @@
         throw new Error('No elements found for include in ' + env + ' Context');
       }
     }
-    function getRootNode2(_ref79) {
-      var include = _ref79.include, exclude = _ref79.exclude;
+    function getRootNode2(_ref69) {
+      var include = _ref69.include, exclude = _ref69.exclude;
       var selectors = Array.from(include).concat(Array.from(exclude));
       for (var _i23 = 0; _i23 < selectors.length; _i23++) {
         var item = selectors[_i23];
@@ -19018,8 +18888,8 @@
         return [];
       }
       var _Context = new Context(context), frames = _Context.frames;
-      return frames.map(function(_ref80) {
-        var node = _ref80.node, frameContext = _objectWithoutProperties(_ref80, _excluded14);
+      return frames.map(function(_ref70) {
+        var node = _ref70.node, frameContext = _objectWithoutProperties(_ref70, _excluded14);
         frameContext.initiator = false;
         var frameSelector = _getAncestry(node);
         return {
@@ -19029,8 +18899,8 @@
       });
     }
     function _getRule(ruleId) {
-      var rule = axe._audit.rules.find(function(_ref81) {
-        var id = _ref81.id;
+      var rule = axe._audit.rules.find(function(_ref71) {
+        var id = _ref71.id;
         return id === ruleId;
       });
       if (!rule) {
@@ -19169,8 +19039,8 @@
       return !!standards_default.htmlElms[nodeName2];
     }
     var is_html_element_default = isHtmlElement;
-    function _isNodeInContext(node, _ref82) {
-      var _ref82$include = _ref82.include, include = _ref82$include === void 0 ? [] : _ref82$include, _ref82$exclude = _ref82.exclude, exclude = _ref82$exclude === void 0 ? [] : _ref82$exclude;
+    function _isNodeInContext(node, _ref72) {
+      var _ref72$include = _ref72.include, include = _ref72$include === void 0 ? [] : _ref72$include, _ref72$exclude = _ref72.exclude, exclude = _ref72$exclude === void 0 ? [] : _ref72$exclude;
       var filterInclude = include.filter(function(candidate) {
         return _contains(candidate, node);
       });
@@ -19526,8 +19396,8 @@
       return matchExpressions(domTree, expressions, filter);
     }
     var query_selector_all_filter_default = querySelectorAllFilter;
-    function preloadCssom(_ref83) {
-      var _ref83$treeRoot = _ref83.treeRoot, treeRoot = _ref83$treeRoot === void 0 ? axe._tree[0] : _ref83$treeRoot;
+    function preloadCssom(_ref73) {
+      var _ref73$treeRoot = _ref73.treeRoot, treeRoot = _ref73$treeRoot === void 0 ? axe._tree[0] : _ref73$treeRoot;
       var rootNodes = getAllRootNodesInTree(treeRoot);
       if (!rootNodes.length) {
         return Promise.resolve();
@@ -19557,8 +19427,8 @@
     }
     function getCssomForAllRootNodes(rootNodes, convertDataToStylesheet) {
       var promises = [];
-      rootNodes.forEach(function(_ref84, index) {
-        var rootNode = _ref84.rootNode, shadowId = _ref84.shadowId;
+      rootNodes.forEach(function(_ref74, index) {
+        var rootNode = _ref74.rootNode, shadowId = _ref74.shadowId;
         var sheets = getStylesheetsOfRootNode(rootNode, shadowId, convertDataToStylesheet);
         if (!sheets) {
           return Promise.all(promises);
@@ -19644,10 +19514,10 @@
         return true;
       });
     }
-    function preloadMedia(_ref85) {
-      var _ref85$treeRoot = _ref85.treeRoot, treeRoot = _ref85$treeRoot === void 0 ? axe._tree[0] : _ref85$treeRoot;
-      var mediaVirtualNodes = query_selector_all_filter_default(treeRoot, 'video, audio', function(_ref86) {
-        var actualNode = _ref86.actualNode;
+    function preloadMedia(_ref75) {
+      var _ref75$treeRoot = _ref75.treeRoot, treeRoot = _ref75$treeRoot === void 0 ? axe._tree[0] : _ref75$treeRoot;
+      var mediaVirtualNodes = query_selector_all_filter_default(treeRoot, 'video, audio', function(_ref76) {
+        var actualNode = _ref76.actualNode;
         if (actualNode.hasAttribute('src')) {
           return !!actualNode.getAttribute('src');
         }
@@ -19659,8 +19529,8 @@
         }
         return true;
       });
-      return Promise.all(mediaVirtualNodes.map(function(_ref87) {
-        var actualNode = _ref87.actualNode;
+      return Promise.all(mediaVirtualNodes.map(function(_ref77) {
+        var actualNode = _ref77.actualNode;
         return isMediaElementReady(actualNode);
       }));
     }
@@ -19963,8 +19833,8 @@
       }
     }
     function setScrollState(scrollState) {
-      scrollState.forEach(function(_ref89) {
-        var elm = _ref89.elm, top = _ref89.top, left = _ref89.left;
+      scrollState.forEach(function(_ref79) {
+        var elm = _ref79.elm, top = _ref79.top, left = _ref79.left;
         return setScroll(elm, top, left);
       });
     }
@@ -19992,8 +19862,8 @@
       }
       return selectAllRecursive(selectorArr, doc);
     }
-    function selectAllRecursive(_ref90, doc) {
-      var _ref91 = _toArray(_ref90), selectorStr = _ref91[0], restSelector = _ref91.slice(1);
+    function selectAllRecursive(_ref80, doc) {
+      var _ref81 = _toArray(_ref80), selectorStr = _ref81[0], restSelector = _ref81.slice(1);
       var elms = doc.querySelectorAll(selectorStr);
       if (restSelector.length === 0) {
         return Array.from(elms);
@@ -20050,7 +19920,6 @@
     }
     var valid_langs_default = isValidLang;
     var SerialVirtualNode = function(_abstract_virtual_nod2) {
-      _inherits(SerialVirtualNode, _abstract_virtual_nod2);
       function SerialVirtualNode(serialNode) {
         var _this6;
         _classCallCheck(this, SerialVirtualNode);
@@ -20059,7 +19928,8 @@
         _this6._attrs = normaliseAttrs(serialNode);
         return _this6;
       }
-      _createClass(SerialVirtualNode, [ {
+      _inherits(SerialVirtualNode, _abstract_virtual_nod2);
+      return _createClass(SerialVirtualNode, [ {
         key: 'props',
         get: function get() {
           return this._props;
@@ -20081,7 +19951,6 @@
           return Object.keys(this._attrs);
         }
       } ]);
-      return SerialVirtualNode;
     }(abstract_virtual_node_default);
     var nodeNamesToTypes = {
       '#cdata-section': 2,
@@ -20096,9 +19965,9 @@
       nodeTypeToName[nodeNamesToTypes[nodeName2]] = nodeName2;
     });
     function normaliseProps(serialNode) {
-      var _serialNode$nodeName, _ref92, _serialNode$nodeType;
+      var _serialNode$nodeName, _ref82, _serialNode$nodeType;
       var nodeName2 = (_serialNode$nodeName = serialNode.nodeName) !== null && _serialNode$nodeName !== void 0 ? _serialNode$nodeName : nodeTypeToName[serialNode.nodeType];
-      var nodeType = (_ref92 = (_serialNode$nodeType = serialNode.nodeType) !== null && _serialNode$nodeType !== void 0 ? _serialNode$nodeType : nodeNamesToTypes[serialNode.nodeName]) !== null && _ref92 !== void 0 ? _ref92 : 1;
+      var nodeType = (_ref82 = (_serialNode$nodeType = serialNode.nodeType) !== null && _serialNode$nodeType !== void 0 ? _serialNode$nodeType : nodeNamesToTypes[serialNode.nodeName]) !== null && _ref82 !== void 0 ? _ref82 : 1;
       assert_default(typeof nodeType === 'number', 'nodeType has to be a number, got \''.concat(nodeType, '\''));
       assert_default(typeof nodeName2 === 'string', 'nodeName has to be a string, got \''.concat(nodeName2, '\''));
       nodeName2 = nodeName2.toLowerCase();
@@ -20119,8 +19988,8 @@
       delete props.attributes;
       return Object.freeze(props);
     }
-    function normaliseAttrs(_ref93) {
-      var _ref93$attributes = _ref93.attributes, attributes2 = _ref93$attributes === void 0 ? {} : _ref93$attributes;
+    function normaliseAttrs(_ref83) {
+      var _ref83$attributes = _ref83.attributes, attributes2 = _ref83$attributes === void 0 ? {} : _ref83$attributes;
       var attrMap = {
         htmlFor: 'for',
         className: 'class'
@@ -22686,8 +22555,8 @@
       nodeName: [ 'abbr', 'address', 'canvas', 'div', 'p', 'pre', 'blockquote', 'ins', 'del', 'output', 'span', 'table', 'tbody', 'thead', 'tfoot', 'td', 'em', 'strong', 'small', 's', 'cite', 'q', 'dfn', 'abbr', 'time', 'code', 'var', 'samp', 'kbd', 'sub', 'sup', 'i', 'b', 'u', 'mark', 'ruby', 'rt', 'rp', 'bdi', 'bdo', 'br', 'wbr', 'th', 'tr' ]
     } ];
     lookupTable.evaluateRoleForElement = {
-      A: function A(_ref94) {
-        var node = _ref94.node, out = _ref94.out;
+      A: function A(_ref84) {
+        var node = _ref84.node, out = _ref84.out;
         if (node.namespaceURI === 'http://www.w3.org/2000/svg') {
           return true;
         }
@@ -22696,19 +22565,19 @@
         }
         return true;
       },
-      AREA: function AREA(_ref95) {
-        var node = _ref95.node;
+      AREA: function AREA(_ref85) {
+        var node = _ref85.node;
         return !node.href;
       },
-      BUTTON: function BUTTON(_ref96) {
-        var node = _ref96.node, role = _ref96.role, out = _ref96.out;
+      BUTTON: function BUTTON(_ref86) {
+        var node = _ref86.node, role = _ref86.role, out = _ref86.out;
         if (node.getAttribute('type') === 'menu') {
           return role === 'menuitem';
         }
         return out;
       },
-      IMG: function IMG(_ref97) {
-        var node = _ref97.node, role = _ref97.role, out = _ref97.out;
+      IMG: function IMG(_ref87) {
+        var node = _ref87.node, role = _ref87.role, out = _ref87.out;
         switch (node.alt) {
          case null:
           return out;
@@ -22720,8 +22589,8 @@
           return role !== 'presentation' && role !== 'none';
         }
       },
-      INPUT: function INPUT(_ref98) {
-        var node = _ref98.node, role = _ref98.role, out = _ref98.out;
+      INPUT: function INPUT(_ref88) {
+        var node = _ref88.node, role = _ref88.role, out = _ref88.out;
         switch (node.type) {
          case 'button':
          case 'image':
@@ -22751,32 +22620,32 @@
           return false;
         }
       },
-      LI: function LI(_ref99) {
-        var node = _ref99.node, out = _ref99.out;
+      LI: function LI(_ref89) {
+        var node = _ref89.node, out = _ref89.out;
         var hasImplicitListitemRole = axe.utils.matchesSelector(node, 'ol li, ul li');
         if (hasImplicitListitemRole) {
           return out;
         }
         return true;
       },
-      MENU: function MENU(_ref100) {
-        var node = _ref100.node;
+      MENU: function MENU(_ref90) {
+        var node = _ref90.node;
         if (node.getAttribute('type') === 'context') {
           return false;
         }
         return true;
       },
-      OPTION: function OPTION(_ref101) {
-        var node = _ref101.node;
+      OPTION: function OPTION(_ref91) {
+        var node = _ref91.node;
         var withinOptionList = axe.utils.matchesSelector(node, 'select > option, datalist > option, optgroup > option');
         return !withinOptionList;
       },
-      SELECT: function SELECT(_ref102) {
-        var node = _ref102.node, role = _ref102.role;
+      SELECT: function SELECT(_ref92) {
+        var node = _ref92.node, role = _ref92.role;
         return !node.multiple && node.size <= 1 && role === 'menu';
       },
-      SVG: function SVG(_ref103) {
-        var node = _ref103.node, out = _ref103.out;
+      SVG: function SVG(_ref93) {
+        var node = _ref93.node, out = _ref93.out;
         if (node.parentNode && node.parentNode.namespaceURI === 'http://www.w3.org/2000/svg') {
           return true;
         }
@@ -22802,7 +22671,7 @@
     var is_accessible_ref_default = isAccessibleRef;
     function _isComboboxPopup(virtualNode) {
       var _popupRoles;
-      var _ref104 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, popupRoles = _ref104.popupRoles;
+      var _ref94 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, popupRoles = _ref94.popupRoles;
       var role = get_role_default(virtualNode);
       (_popupRoles = popupRoles) !== null && _popupRoles !== void 0 ? _popupRoles : popupRoles = aria_attrs_default['aria-haspopup'].values;
       if (!popupRoles.includes(role)) {
@@ -22985,8 +22854,8 @@
       if (!virtualNode.children) {
         return void 0;
       }
-      var titleNode = virtualNode.children.find(function(_ref105) {
-        var props = _ref105.props;
+      var titleNode = virtualNode.children.find(function(_ref95) {
+        var props = _ref95.props;
         return props.nodeName === 'title';
       });
       if (!titleNode) {
@@ -23123,8 +22992,8 @@
       }
       return false;
     }
-    function getNumberValue(domNode, _ref106) {
-      var cssProperty = _ref106.cssProperty, absoluteValues = _ref106.absoluteValues, normalValue = _ref106.normalValue;
+    function getNumberValue(domNode, _ref96) {
+      var cssProperty = _ref96.cssProperty, absoluteValues = _ref96.absoluteValues, normalValue = _ref96.normalValue;
       var computedStyle = window.getComputedStyle(domNode);
       var cssPropValue = computedStyle.getPropertyValue(cssProperty);
       if (cssPropValue === 'normal') {
@@ -23268,11 +23137,11 @@
           return [ virtualNode ];
         }
         return [];
-      } else if (node !== document.body && has_content_default(node, true)) {
+      } else if (node !== document.body && has_content_default(node, true) && !isShallowlyHidden(virtualNode)) {
         return [ virtualNode ];
       } else {
-        return virtualNode.children.filter(function(_ref107) {
-          var actualNode = _ref107.actualNode;
+        return virtualNode.children.filter(function(_ref97) {
+          var actualNode = _ref97.actualNode;
           return actualNode.nodeType === 1;
         }).map(function(vNode) {
           return findRegionlessElms(vNode, options);
@@ -23281,6 +23150,9 @@
         }, []);
       }
     }
+    function isShallowlyHidden(virtualNode) {
+      return [ 'none', 'presentation' ].includes(get_role_default(virtualNode)) && !hasChildTextNodes(virtualNode);
+    }
     function isRegion(virtualNode, options) {
       var node = virtualNode.actualNode;
       var role = get_role_default(virtualNode);
@@ -23417,7 +23289,7 @@
     var separatorRegex = /[;,\s]/;
     var validRedirectNumRegex = /^[0-9.]+$/;
     function metaRefreshEvaluate(node, options, virtualNode) {
-      var _ref108 = options || {}, minDelay = _ref108.minDelay, maxDelay = _ref108.maxDelay;
+      var _ref98 = options || {}, minDelay = _ref98.minDelay, maxDelay = _ref98.maxDelay;
       var content = (virtualNode.attr('content') || '').trim();
       var _content$split = content.split(separatorRegex), _content$split2 = _slicedToArray(_content$split, 1), redirectStr = _content$split2[0];
       if (!redirectStr.match(validRedirectNumRegex)) {
@@ -23689,7 +23561,7 @@
       var r = simpleAlphaCompositing(sourceColor.red, sourceColor.alpha, backdrop.red, backdrop.alpha, blendingResult.r * 255);
       var g2 = simpleAlphaCompositing(sourceColor.green, sourceColor.alpha, backdrop.green, backdrop.alpha, blendingResult.g * 255);
       var b2 = simpleAlphaCompositing(sourceColor.blue, sourceColor.alpha, backdrop.blue, backdrop.alpha, blendingResult.b * 255);
-      var \u03b1o = clamp3(sourceColor.alpha + backdrop.alpha * (1 - sourceColor.alpha), 0, 1);
+      var \u03b1o = clamp2(sourceColor.alpha + backdrop.alpha * (1 - sourceColor.alpha), 0, 1);
       if (\u03b1o === 0) {
         return new color_default(r, g2, b2, \u03b1o);
       }
@@ -23701,7 +23573,7 @@
     function simpleAlphaCompositing(Cs, \u03b1s, Cb, \u03b1b, blendingResult) {
       return \u03b1s * (1 - \u03b1b) * Cs + \u03b1s * \u03b1b * blendingResult + (1 - \u03b1s) * \u03b1b * Cb;
     }
-    function clamp3(value, min, max2) {
+    function clamp2(value, min, max2) {
       return Math.min(Math.max(min, value), max2);
     }
     function blend(Cb, Cs, blendMode) {
@@ -23780,10 +23652,10 @@
     var OPAQUE_STROKE_OFFSET_MIN_PX = 1.5;
     var edges = [ 'top', 'right', 'bottom', 'left' ];
     function _getStrokeColorsFromShadows(parsedShadows) {
-      var _ref109 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref109$ignoreEdgeCou = _ref109.ignoreEdgeCount, ignoreEdgeCount = _ref109$ignoreEdgeCou === void 0 ? false : _ref109$ignoreEdgeCou;
+      var _ref99 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref99$ignoreEdgeCoun = _ref99.ignoreEdgeCount, ignoreEdgeCount = _ref99$ignoreEdgeCoun === void 0 ? false : _ref99$ignoreEdgeCoun;
       var shadowMap = getShadowColorsMap(parsedShadows);
-      var shadowsByColor = Object.entries(shadowMap).map(function(_ref110) {
-        var _ref111 = _slicedToArray(_ref110, 2), colorStr = _ref111[0], sides = _ref111[1];
+      var shadowsByColor = Object.entries(shadowMap).map(function(_ref100) {
+        var _ref101 = _slicedToArray(_ref100, 2), colorStr = _ref101[0], sides = _ref101[1];
         var edgeCount = edges.filter(function(side) {
           return sides[side].length !== 0;
         }).length;
@@ -23793,8 +23665,8 @@
           edgeCount: edgeCount
         };
       });
-      if (!ignoreEdgeCount && shadowsByColor.some(function(_ref112) {
-        var edgeCount = _ref112.edgeCount;
+      if (!ignoreEdgeCount && shadowsByColor.some(function(_ref102) {
+        var edgeCount = _ref102.edgeCount;
         return edgeCount > 1 && edgeCount < 4;
       })) {
         return null;
@@ -23836,8 +23708,8 @@
       }
       return colorMap;
     }
-    function shadowGroupToColor(_ref113) {
-      var colorStr = _ref113.colorStr, sides = _ref113.sides, edgeCount = _ref113.edgeCount;
+    function shadowGroupToColor(_ref103) {
+      var colorStr = _ref103.colorStr, sides = _ref103.sides, edgeCount = _ref103.edgeCount;
       if (edgeCount !== 4) {
         return null;
       }
@@ -23888,8 +23760,8 @@
           throw new Error('Unable to process text-shadows: '.concat(str));
         }
       }
-      shadows.forEach(function(_ref114) {
-        var pixels = _ref114.pixels;
+      shadows.forEach(function(_ref104) {
+        var pixels = _ref104.pixels;
         if (pixels.length === 2) {
           pixels.push(0);
         }
@@ -23897,7 +23769,7 @@
       return shadows;
     }
     function _getTextShadowColors(node) {
-      var _ref115 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, minRatio = _ref115.minRatio, maxRatio = _ref115.maxRatio, ignoreEdgeCount = _ref115.ignoreEdgeCount;
+      var _ref105 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, minRatio = _ref105.minRatio, maxRatio = _ref105.maxRatio, ignoreEdgeCount = _ref105.ignoreEdgeCount;
       var shadowColors = [];
       var style = window.getComputedStyle(node);
       var textShadow = style.getPropertyValue('text-shadow');
@@ -23935,14 +23807,14 @@
             shadowColors.push.apply(shadowColors, _toConsumableArray(_strokeColors));
             thinShadows.splice(0, thinShadows.length);
           }
-          var _color4 = textShadowColor({
+          var _color3 = textShadowColor({
             colorStr: colorStr,
             offsetX: offsetX,
             offsetY: offsetY,
             blurRadius: blurRadius,
             fontSize: fontSize
           });
-          shadowColors.push(_color4);
+          shadowColors.push(_color3);
         }
       } catch (err) {
         _iterator17.e(err);
@@ -23960,8 +23832,8 @@
       }
       return shadowColors;
     }
-    function textShadowColor(_ref116) {
-      var colorStr = _ref116.colorStr, offsetX = _ref116.offsetX, offsetY = _ref116.offsetY, blurRadius = _ref116.blurRadius, fontSize = _ref116.fontSize;
+    function textShadowColor(_ref106) {
+      var colorStr = _ref106.colorStr, offsetX = _ref106.offsetX, offsetY = _ref106.offsetY, blurRadius = _ref106.blurRadius, fontSize = _ref106.fontSize;
       if (offsetX > blurRadius || offsetY > blurRadius) {
         return new color_default(0, 0, 0, 0);
       }
@@ -23990,13 +23862,13 @@
         var _stackingOrder2;
         var bgVNode = get_node_from_tree_default(bgElm);
         var bgColor = getOwnBackgroundColor2(bgVNode);
-        var stackingOrder = bgVNode._stackingOrder.filter(function(_ref117) {
-          var vNode = _ref117.vNode;
+        var stackingOrder = bgVNode._stackingOrder.filter(function(_ref107) {
+          var vNode = _ref107.vNode;
           return !!vNode;
         });
-        stackingOrder.forEach(function(_ref118, index) {
+        stackingOrder.forEach(function(_ref108, index) {
           var _stackingOrder;
-          var vNode = _ref118.vNode;
+          var vNode = _ref108.vNode;
           var ancestorVNode2 = (_stackingOrder = stackingOrder[index - 1]) === null || _stackingOrder === void 0 ? void 0 : _stackingOrder.vNode;
           var context2 = addToStackingContext(contextMap, vNode, ancestorVNode2);
           if (index === 0 && !contextMap.get(vNode)) {
@@ -24207,12 +24079,12 @@
       var fgColors = [];
       for (var _i32 = 0, _colorStack = colorStack; _i32 < _colorStack.length; _i32++) {
         var colorFn = _colorStack[_i32];
-        var _color5 = colorFn();
-        if (!_color5) {
+        var _color4 = colorFn();
+        if (!_color4) {
           continue;
         }
-        fgColors = fgColors.concat(_color5);
-        if (_color5.alpha === 1) {
+        fgColors = fgColors.concat(_color4);
+        if (_color4.alpha === 1) {
           break;
         }
       }
@@ -24232,8 +24104,8 @@
     function getTextColor(nodeStyle) {
       return new color_default().parseString(nodeStyle.getPropertyValue('-webkit-text-fill-color') || nodeStyle.getPropertyValue('color'));
     }
-    function getStrokeColor(nodeStyle, _ref119) {
-      var _ref119$textStrokeEmM = _ref119.textStrokeEmMin, textStrokeEmMin = _ref119$textStrokeEmM === void 0 ? 0 : _ref119$textStrokeEmM;
+    function getStrokeColor(nodeStyle, _ref109) {
+      var _ref109$textStrokeEmM = _ref109.textStrokeEmMin, textStrokeEmMin = _ref109$textStrokeEmM === void 0 ? 0 : _ref109$textStrokeEmM;
       var strokeWidth = parseFloat(nodeStyle.getPropertyValue('-webkit-text-stroke-width'));
       if (strokeWidth === 0) {
         return null;
@@ -24395,25 +24267,25 @@
       if (results.length < 2) {
         return results;
       }
-      var incompleteResults = results.filter(function(_ref120) {
-        var result = _ref120.result;
+      var incompleteResults = results.filter(function(_ref110) {
+        var result = _ref110.result;
         return result !== void 0;
       });
       var uniqueResults = [];
       var nameMap = {};
-      var _loop6 = function _loop6(index) {
+      var _loop8 = function _loop8(index) {
         var _currentResult$relate;
         var currentResult = incompleteResults[index];
         var _currentResult$data = currentResult.data, name = _currentResult$data.name, urlProps = _currentResult$data.urlProps;
         if (nameMap[name]) {
           return 1;
         }
-        var sameNameResults = incompleteResults.filter(function(_ref121, resultNum) {
-          var data = _ref121.data;
+        var sameNameResults = incompleteResults.filter(function(_ref111, resultNum) {
+          var data = _ref111.data;
           return data.name === name && resultNum !== index;
         });
-        var isSameUrl = sameNameResults.every(function(_ref122) {
-          var data = _ref122.data;
+        var isSameUrl = sameNameResults.every(function(_ref112) {
+          var data = _ref112.data;
           return isIdenticalObject(data.urlProps, urlProps);
         });
         if (sameNameResults.length && !isSameUrl) {
@@ -24427,7 +24299,7 @@
         uniqueResults.push(currentResult);
       };
       for (var index = 0; index < incompleteResults.length; index++) {
-        if (_loop6(index)) {
+        if (_loop8(index)) {
           continue;
         }
       }
@@ -24439,7 +24311,7 @@
       var headingRole = role && role.includes('heading');
       var ariaHeadingLevel = vNode.attr('aria-level');
       var ariaLevel = parseInt(ariaHeadingLevel, 10);
-      var _ref123 = vNode.props.nodeName.match(/h(\d)/) || [], _ref124 = _slicedToArray(_ref123, 2), headingLevel = _ref124[1];
+      var _ref113 = vNode.props.nodeName.match(/h(\d)/) || [], _ref114 = _slicedToArray(_ref113, 2), headingLevel = _ref114[1];
       if (!headingRole) {
         return -1;
       }
@@ -24499,14 +24371,14 @@
     }
     function getHeadingOrder(results) {
       results = _toConsumableArray(results);
-      results.sort(function(_ref125, _ref126) {
-        var nodeA = _ref125.node;
-        var nodeB = _ref126.node;
+      results.sort(function(_ref115, _ref116) {
+        var nodeA = _ref115.node;
+        var nodeB = _ref116.node;
         return nodeA.ancestry.length - nodeB.ancestry.length;
       });
       var headingOrder = results.reduce(mergeHeadingOrder, []);
-      return headingOrder.filter(function(_ref127) {
-        var level = _ref127.level;
+      return headingOrder.filter(function(_ref117) {
+        var level = _ref117.level;
         return level !== -1;
       });
     }
@@ -24659,12 +24531,14 @@
     }
     function getLargestUnobscuredArea(vNode, obscuredNodes) {
       var nodeRect = vNode.boundingClientRect;
-      var obscuringRects = obscuredNodes.map(function(_ref128) {
-        var rect = _ref128.boundingClientRect;
+      var obscuringRects = obscuredNodes.map(function(_ref118) {
+        var rect = _ref118.boundingClientRect;
         return rect;
       });
-      var unobscuredRects = _splitRects(nodeRect, obscuringRects);
-      if (unobscuredRects.length === 0) {
+      var unobscuredRects;
+      try {
+        unobscuredRects = _splitRects(nodeRect, obscuringRects);
+      } catch (err2) {
         return null;
       }
       return getLargestRect2(unobscuredRects);
@@ -24701,11 +24575,11 @@
       };
     }
     function isDescendantNotInTabOrder2(vAncestor, vNode) {
-      return vAncestor.actualNode.contains(vNode.actualNode) && !_isInTabOrder(vNode);
+      return _contains(vAncestor, vNode) && !_isInTabOrder(vNode);
     }
     function mapActualNodes(vNodes) {
-      return vNodes.map(function(_ref129) {
-        var actualNode = _ref129.actualNode;
+      return vNodes.map(function(_ref119) {
+        var actualNode = _ref119.actualNode;
         return actualNode;
       });
     }
@@ -24728,7 +24602,24 @@
           if (get_role_type_default(vNeighbor) !== 'widget' || !_isFocusable(vNeighbor)) {
             continue;
           }
-          var offset = roundToSingleDecimal(_getOffset(vNode, vNeighbor, minOffset / 2)) * 2;
+          var offset = null;
+          try {
+            offset = _getOffset(vNode, vNeighbor, minOffset / 2);
+          } catch (err2) {
+            if (err2.message.startsWith('splitRects')) {
+              this.data({
+                messageKey: 'tooManyRects',
+                closestOffset: 0,
+                minOffset: minOffset
+              });
+              return void 0;
+            }
+            throw err2;
+          }
+          if (offset === null) {
+            continue;
+          }
+          offset = roundToSingleDecimal(offset) * 2;
           if (offset + roundingMargin2 >= minOffset) {
             continue;
           }
@@ -24747,8 +24638,8 @@
         });
         return true;
       }
-      this.relatedNodes(closeNeighbors.map(function(_ref130) {
-        var actualNode = _ref130.actualNode;
+      this.relatedNodes(closeNeighbors.map(function(_ref120) {
+        var actualNode = _ref120.actualNode;
         return actualNode;
       }));
       if (!closeNeighbors.some(_isInTabOrder)) {
@@ -24769,7 +24660,7 @@
       return Math.round(num * 10) / 10;
     }
     function metaViewportScaleEvaluate(node, options, virtualNode) {
-      var _ref131 = options || {}, _ref131$scaleMinimum = _ref131.scaleMinimum, scaleMinimum = _ref131$scaleMinimum === void 0 ? 2 : _ref131$scaleMinimum, _ref131$lowerBound = _ref131.lowerBound, lowerBound = _ref131$lowerBound === void 0 ? false : _ref131$lowerBound;
+      var _ref121 = options || {}, _ref121$scaleMinimum = _ref121.scaleMinimum, scaleMinimum = _ref121$scaleMinimum === void 0 ? 2 : _ref121$scaleMinimum, _ref121$lowerBound = _ref121.lowerBound, lowerBound = _ref121$lowerBound === void 0 ? false : _ref121$lowerBound;
       var content = virtualNode.attr('content') || '';
       if (!content) {
         return true;
@@ -24814,23 +24705,23 @@
     }
     var meta_viewport_scale_evaluate_default = metaViewportScaleEvaluate;
     function cssOrientationLockEvaluate(node, options, virtualNode, context) {
-      var _ref132 = context || {}, _ref132$cssom = _ref132.cssom, cssom = _ref132$cssom === void 0 ? void 0 : _ref132$cssom;
-      var _ref133 = options || {}, _ref133$degreeThresho = _ref133.degreeThreshold, degreeThreshold = _ref133$degreeThresho === void 0 ? 0 : _ref133$degreeThresho;
+      var _ref122 = context || {}, _ref122$cssom = _ref122.cssom, cssom = _ref122$cssom === void 0 ? void 0 : _ref122$cssom;
+      var _ref123 = options || {}, _ref123$degreeThresho = _ref123.degreeThreshold, degreeThreshold = _ref123$degreeThresho === void 0 ? 0 : _ref123$degreeThresho;
       if (!cssom || !cssom.length) {
         return void 0;
       }
       var isLocked = false;
       var relatedElements = [];
       var rulesGroupByDocumentFragment = groupCssomByDocument(cssom);
-      var _loop7 = function _loop7() {
+      var _loop9 = function _loop9() {
         var key = _Object$keys3[_i33];
         var _rulesGroupByDocument = rulesGroupByDocumentFragment[key], root = _rulesGroupByDocument.root, rules = _rulesGroupByDocument.rules;
         var orientationRules = rules.filter(isMediaRuleWithOrientation);
         if (!orientationRules.length) {
           return 1;
         }
-        orientationRules.forEach(function(_ref134) {
-          var cssRules = _ref134.cssRules;
+        orientationRules.forEach(function(_ref124) {
+          var cssRules = _ref124.cssRules;
           Array.from(cssRules).forEach(function(cssRule) {
             var locked = getIsOrientationLocked(cssRule);
             if (locked && cssRule.selectorText.toUpperCase() !== 'HTML') {
@@ -24842,7 +24733,7 @@
         });
       };
       for (var _i33 = 0, _Object$keys3 = Object.keys(rulesGroupByDocumentFragment); _i33 < _Object$keys3.length; _i33++) {
-        if (_loop7()) {
+        if (_loop9()) {
           continue;
         }
       }
@@ -24854,8 +24745,8 @@
       }
       return false;
       function groupCssomByDocument(cssObjectModel) {
-        return cssObjectModel.reduce(function(out, _ref135) {
-          var sheet = _ref135.sheet, root = _ref135.root, shadowId = _ref135.shadowId;
+        return cssObjectModel.reduce(function(out, _ref125) {
+          var sheet = _ref125.sheet, root = _ref125.root, shadowId = _ref125.shadowId;
           var key = shadowId ? shadowId : 'topDocument';
           if (!out[key]) {
             out[key] = {
@@ -24871,15 +24762,15 @@
           return out;
         }, {});
       }
-      function isMediaRuleWithOrientation(_ref136) {
-        var type2 = _ref136.type, cssText = _ref136.cssText;
+      function isMediaRuleWithOrientation(_ref126) {
+        var type2 = _ref126.type, cssText = _ref126.cssText;
         if (type2 !== 4) {
           return false;
         }
         return /orientation:\s*landscape/i.test(cssText) || /orientation:\s*portrait/i.test(cssText);
       }
-      function getIsOrientationLocked(_ref137) {
-        var selectorText = _ref137.selectorText, style = _ref137.style;
+      function getIsOrientationLocked(_ref127) {
+        var selectorText = _ref127.selectorText, style = _ref127.style;
         if (!selectorText || style.length <= 0) {
           return false;
         }
@@ -24934,7 +24825,7 @@
         }
       }
       function getAngleInDegrees(angleWithUnit) {
-        var _ref138 = angleWithUnit.match(/(deg|grad|rad|turn)/) || [], _ref139 = _slicedToArray(_ref138, 1), unit = _ref139[0];
+        var _ref128 = angleWithUnit.match(/(deg|grad|rad|turn)/) || [], _ref129 = _slicedToArray(_ref128, 1), unit = _ref129[0];
         if (!unit) {
           return 0;
         }
@@ -25089,7 +24980,7 @@
     function onlyListitemsEvaluate(node, options, virtualNode) {
       var hasNonEmptyTextNode = false;
       var atLeastOneListitem = false;
-      var isEmpty = true;
+      var isEmpty2 = true;
       var badNodes = [];
       var badRoleNodes = [];
       var badRoles = [];
@@ -25102,7 +24993,7 @@
         if (actualNode.nodeType !== 1 || !_isVisibleToScreenReaders(actualNode)) {
           return;
         }
-        isEmpty = false;
+        isEmpty2 = false;
         var isLi = actualNode.nodeName.toUpperCase() === 'LI';
         var role = get_role_default(vNode);
         var isListItemRole = role === 'listitem';
@@ -25123,7 +25014,7 @@
         this.relatedNodes(badNodes);
         return true;
       }
-      if (isEmpty || atLeastOneListitem) {
+      if (isEmpty2 || atLeastOneListitem) {
         return false;
       }
       this.relatedNodes(badRoleNodes);
@@ -25225,8 +25116,8 @@
       this.relatedNodes(relatedNodes);
       return true;
     }
-    function getInvalidSelector(vChild, nested, _ref140) {
-      var _ref140$validRoles = _ref140.validRoles, validRoles = _ref140$validRoles === void 0 ? [] : _ref140$validRoles, _ref140$validNodeName = _ref140.validNodeNames, validNodeNames = _ref140$validNodeName === void 0 ? [] : _ref140$validNodeName;
+    function getInvalidSelector(vChild, nested, _ref130) {
+      var _ref130$validRoles = _ref130.validRoles, validRoles = _ref130$validRoles === void 0 ? [] : _ref130$validRoles, _ref130$validNodeName = _ref130.validNodeNames, validNodeNames = _ref130$validNodeName === void 0 ? [] : _ref130$validNodeName;
       var _vChild$props = vChild.props, nodeName2 = _vChild$props.nodeName, nodeType = _vChild$props.nodeType, nodeValue = _vChild$props.nodeValue;
       var selector = nested ? 'div > ' : '';
       if (nodeType === 3 && nodeValue.trim() !== '') {
@@ -25413,9 +25304,6 @@
       var pixelThreshold = options === null || options === void 0 ? void 0 : options.pixelThreshold;
       var occurrenceThreshold = (_options$occurrenceTh = options === null || options === void 0 ? void 0 : options.occurrenceThreshold) !== null && _options$occurrenceTh !== void 0 ? _options$occurrenceTh : options === null || options === void 0 ? void 0 : options.occuranceThreshold;
       var accText = accessible_text_default(node).toLowerCase();
-      if (is_human_interpretable_default(accText) < 1) {
-        return void 0;
-      }
       var visibleText = sanitize_default(subtree_text_default(virtualNode, {
         subtreeDescendant: true,
         ignoreIconLigature: true,
@@ -25425,10 +25313,7 @@
       if (!visibleText) {
         return true;
       }
-      if (is_human_interpretable_default(visibleText) < 1) {
-        if (isStringContained(visibleText, accText)) {
-          return true;
-        }
+      if (is_human_interpretable_default(accText) < 1 || is_human_interpretable_default(visibleText) < 1) {
         return void 0;
       }
       return isStringContained(visibleText, accText);
@@ -25683,8 +25568,8 @@
     }
     var focusable_no_name_evaluate_default = focusableNoNameEvaluate;
     function focusableModalOpenEvaluate(node, options, virtualNode) {
-      var tabbableElements = virtualNode.tabbableElements.map(function(_ref141) {
-        var actualNode = _ref141.actualNode;
+      var tabbableElements = virtualNode.tabbableElements.map(function(_ref131) {
+        var actualNode = _ref131.actualNode;
         return actualNode;
       });
       if (!tabbableElements || !tabbableElements.length) {
@@ -26079,7 +25964,7 @@
       var bold = parseFloat(fontWeight) >= boldValue || fontWeight === 'bold';
       var ptSize = Math.ceil(fontSize * 72) / 96;
       var isSmallFont = bold && ptSize < boldTextPt || !bold && ptSize < largeTextPt;
-      var _ref142 = isSmallFont ? contrastRatio.normal : contrastRatio.large, expected = _ref142.expected, minThreshold = _ref142.minThreshold, maxThreshold = _ref142.maxThreshold;
+      var _ref132 = isSmallFont ? contrastRatio.normal : contrastRatio.large, expected = _ref132.expected, minThreshold = _ref132.minThreshold, maxThreshold = _ref132.maxThreshold;
       var pseudoElm = findPseudoElement(virtualNode, {
         ignorePseudo: ignorePseudo,
         pseudoSizeThreshold: pseudoSizeThreshold
@@ -26164,8 +26049,8 @@
       }
       return isValid;
     }
-    function findPseudoElement(vNode, _ref143) {
-      var _ref143$pseudoSizeThr = _ref143.pseudoSizeThreshold, pseudoSizeThreshold = _ref143$pseudoSizeThr === void 0 ? .25 : _ref143$pseudoSizeThr, _ref143$ignorePseudo = _ref143.ignorePseudo, ignorePseudo = _ref143$ignorePseudo === void 0 ? false : _ref143$ignorePseudo;
+    function findPseudoElement(vNode, _ref133) {
+      var _ref133$pseudoSizeThr = _ref133.pseudoSizeThreshold, pseudoSizeThreshold = _ref133$pseudoSizeThr === void 0 ? .25 : _ref133$pseudoSizeThr, _ref133$ignorePseudo = _ref133.ignorePseudo, ignorePseudo = _ref133$ignorePseudo === void 0 ? false : _ref133$ignorePseudo;
       if (ignorePseudo) {
         return;
       }
@@ -26207,7 +26092,7 @@
     }
     function parseUnit(str) {
       var unitRegex = /^([0-9.]+)([a-z]+)$/i;
-      var _ref144 = str.match(unitRegex) || [], _ref145 = _slicedToArray(_ref144, 3), _ref145$ = _ref145[1], value = _ref145$ === void 0 ? '' : _ref145$, _ref145$2 = _ref145[2], unit = _ref145$2 === void 0 ? '' : _ref145$2;
+      var _ref134 = str.match(unitRegex) || [], _ref135 = _slicedToArray(_ref134, 3), _ref135$ = _ref135[1], value = _ref135$ === void 0 ? '' : _ref135$, _ref135$2 = _ref135[2], unit = _ref135$2 === void 0 ? '' : _ref135$2;
       return {
         value: parseFloat(value),
         unit: unit.toLowerCase()
@@ -26220,16 +26105,21 @@
       SECTION: true
     };
     var VALID_ROLES_FOR_SCROLLABLE_REGIONS = {
+      alert: true,
+      alertdialog: true,
       application: true,
       article: true,
       banner: false,
       complementary: true,
       contentinfo: true,
+      dialog: true,
       form: true,
+      log: true,
       main: true,
       navigation: true,
       region: true,
-      search: false
+      search: false,
+      status: true
     };
     function validScrollableTagName(node) {
       var nodeName2 = node.nodeName.toUpperCase();
@@ -26290,7 +26180,7 @@
     function invalidroleEvaluate(node, options, virtualNode) {
       var allRoles = token_list_default(virtualNode.attr('role'));
       var allInvalid = allRoles.every(function(role) {
-        return !is_valid_role_default(role, {
+        return !is_valid_role_default(role.toLowerCase(), {
           allowAbstract: true
         });
       });
@@ -26384,7 +26274,12 @@
       var skipAttrs = [ 'aria-errormessage' ];
       var preChecks = {
         'aria-controls': function ariaControls() {
-          return virtualNode.attr('aria-expanded') !== 'false' && virtualNode.attr('aria-selected') !== 'false';
+          var hasPopup = [ 'false', null ].includes(virtualNode.attr('aria-haspopup')) === false;
+          if (hasPopup) {
+            needsReview = 'aria-controls="'.concat(virtualNode.attr('aria-controls'), '"');
+            messageKey = 'controlsWithinPopup';
+          }
+          return virtualNode.attr('aria-expanded') !== 'false' && virtualNode.attr('aria-selected') !== 'false' && hasPopup === false;
         },
         'aria-current': function ariaCurrent(validValue) {
           if (!validValue) {
@@ -26574,19 +26469,19 @@
         return true;
       }
       var ownedRoles = getOwnedRoles(virtualNode, required);
-      var unallowed = ownedRoles.filter(function(_ref146) {
-        var role = _ref146.role, vNode = _ref146.vNode;
+      var unallowed = ownedRoles.filter(function(_ref136) {
+        var role = _ref136.role, vNode = _ref136.vNode;
         return vNode.props.nodeType === 1 && !required.includes(role);
       });
       if (unallowed.length) {
-        this.relatedNodes(unallowed.map(function(_ref147) {
-          var vNode = _ref147.vNode;
+        this.relatedNodes(unallowed.map(function(_ref137) {
+          var vNode = _ref137.vNode;
           return vNode;
         }));
         this.data({
           messageKey: 'unallowed',
-          values: unallowed.map(function(_ref148) {
-            var vNode = _ref148.vNode, attr = _ref148.attr;
+          values: unallowed.map(function(_ref138) {
+            var vNode = _ref138.vNode, attr = _ref138.attr;
             return getUnallowedSelector(vNode, attr);
           }).filter(function(selector, index, array) {
             return array.indexOf(selector) === index;
@@ -26613,7 +26508,7 @@
       var vNode;
       var ownedRoles = [];
       var ownedVirtual = get_owned_virtual_default(virtualNode);
-      var _loop8 = function _loop8() {
+      var _loop10 = function _loop10() {
         if (vNode.props.nodeType === 3) {
           ownedRoles.push({
             vNode: vNode,
@@ -26642,15 +26537,15 @@
         }
       };
       while (vNode = ownedVirtual.shift()) {
-        if (_loop8()) {
+        if (_loop10()) {
           continue;
         }
       }
       return ownedRoles;
     }
     function hasRequiredChildren(required, ownedRoles) {
-      return ownedRoles.some(function(_ref149) {
-        var role = _ref149.role;
+      return ownedRoles.some(function(_ref139) {
+        var role = _ref139.role;
         return role && required.includes(role);
       });
     }
@@ -26675,8 +26570,8 @@
       }
       return nodeName2;
     }
-    function isContent(_ref150) {
-      var vNode = _ref150.vNode;
+    function isContent(_ref140) {
+      var vNode = _ref140.vNode;
       if (vNode.props.nodeType === 3) {
         return vNode.props.nodeValue.trim().length > 0;
       }
@@ -26817,7 +26712,7 @@
     }
     function ariaConditionalRowAttr(node) {
       var _invalidTableRowAttrs, _invalidTableRowAttrs2;
-      var _ref151 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, invalidTableRowAttrs = _ref151.invalidTableRowAttrs;
+      var _ref141 = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, invalidTableRowAttrs = _ref141.invalidTableRowAttrs;
       var virtualNode = arguments.length > 2 ? arguments[2] : undefined;
       var invalidAttrs = (_invalidTableRowAttrs = invalidTableRowAttrs === null || invalidTableRowAttrs === void 0 || (_invalidTableRowAttrs2 = invalidTableRowAttrs.filter) === null || _invalidTableRowAttrs2 === void 0 ? void 0 : _invalidTableRowAttrs2.call(invalidTableRowAttrs, function(invalidAttr) {
         return virtualNode.hasAttr(invalidAttr);
@@ -27051,7 +26946,7 @@
       if (!role || [ 'none', 'presentation' ].includes(role)) {
         return true;
       }
-      var _ref152 = aria_roles_default[role] || {}, accessibleNameRequired = _ref152.accessibleNameRequired;
+      var _ref142 = aria_roles_default[role] || {}, accessibleNameRequired = _ref142.accessibleNameRequired;
       if (accessibleNameRequired || _isFocusable(virtualNode)) {
         return true;
       }
@@ -28153,7 +28048,7 @@
         this._init();
         this._defaultLocale = null;
       }
-      _createClass(Audit, [ {
+      return _createClass(Audit, [ {
         key: '_setDefaultLocale',
         value: function _setDefaultLocale() {
           if (this._defaultLocale) {
@@ -28543,7 +28438,6 @@
           this._resetLocale();
         }
       } ]);
-      return Audit;
     }();
     var audit_default = Audit;
     function getDefaultOrigin() {
@@ -28668,8 +28562,8 @@
         });
       };
     }
-    function getHelpUrl(_ref153, ruleId, version) {
-      var brand = _ref153.brand, application = _ref153.application, lang = _ref153.lang;
+    function getHelpUrl(_ref143, ruleId, version) {
+      var brand = _ref143.brand, application = _ref143.application, lang = _ref143.lang;
       return constants_default.helpUrlBase + brand + '/' + (version || axe.version.substring(0, axe.version.lastIndexOf('.'))) + '/' + ruleId + '?application=' + encodeURIComponent(application) + (lang && lang !== 'en' ? '&lang=' + encodeURIComponent(lang) : '');
     }
     function setupGlobals(context) {
@@ -28883,9 +28777,9 @@
         toolOptions: options
       });
     }
-    function normalizeRunParams(_ref154) {
-      var _ref156, _options$reporter, _axe$_audit;
-      var _ref155 = _slicedToArray(_ref154, 3), context = _ref155[0], options = _ref155[1], callback = _ref155[2];
+    function normalizeRunParams(_ref144) {
+      var _ref146, _options$reporter, _axe$_audit;
+      var _ref145 = _slicedToArray(_ref144, 3), context = _ref145[0], options = _ref145[1], callback = _ref145[2];
       var typeErr = new TypeError('axe.run arguments are invalid');
       if (!isContextSpec(context)) {
         if (callback !== void 0) {
@@ -28906,7 +28800,7 @@
         throw typeErr;
       }
       options = _clone(options);
-      options.reporter = (_ref156 = (_options$reporter = options.reporter) !== null && _options$reporter !== void 0 ? _options$reporter : (_axe$_audit = axe._audit) === null || _axe$_audit === void 0 ? void 0 : _axe$_audit.reporter) !== null && _ref156 !== void 0 ? _ref156 : 'v1';
+      options.reporter = (_ref146 = (_options$reporter = options.reporter) !== null && _options$reporter !== void 0 ? _options$reporter : (_axe$_audit = axe._audit) === null || _axe$_audit === void 0 ? void 0 : _axe$_audit.reporter) !== null && _ref146 !== void 0 ? _ref146 : 'v1';
       return {
         context: context,
         options: options,
@@ -29021,8 +28915,8 @@
         axe._audit.run(contextObj, options, res, rej);
       }).then(function(results) {
         results = node_serializer_default.mapRawResults(results);
-        var frames = contextObj.frames.map(function(_ref157) {
-          var node = _ref157.node;
+        var frames = contextObj.frames.map(function(_ref147) {
+          var node = _ref147.node;
           return node_serializer_default.toSpec(node);
         });
         var environmentData;
@@ -29043,14 +28937,14 @@
       });
     }
     function finishRun(partialResults) {
-      var _ref159, _options$reporter2, _axe$_audit2;
+      var _ref149, _options$reporter2, _axe$_audit2;
       var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
       options = _clone(options);
-      var _ref158 = partialResults.find(function(r) {
+      var _ref148 = partialResults.find(function(r) {
         return r.environmentData;
-      }) || {}, environmentData = _ref158.environmentData;
+      }) || {}, environmentData = _ref148.environmentData;
       axe._audit.normalizeOptions(options);
-      options.reporter = (_ref159 = (_options$reporter2 = options.reporter) !== null && _options$reporter2 !== void 0 ? _options$reporter2 : (_axe$_audit2 = axe._audit) === null || _axe$_audit2 === void 0 ? void 0 : _axe$_audit2.reporter) !== null && _ref159 !== void 0 ? _ref159 : 'v1';
+      options.reporter = (_ref149 = (_options$reporter2 = options.reporter) !== null && _options$reporter2 !== void 0 ? _options$reporter2 : (_axe$_audit2 = axe._audit) === null || _axe$_audit2 === void 0 ? void 0 : _axe$_audit2.reporter) !== null && _ref149 !== void 0 ? _ref149 : 'v1';
       setFrameSpec(partialResults);
       var results = merge_results_default(partialResults);
       results = axe._audit.after(results, options);
@@ -29080,8 +28974,8 @@
         _iterator23.f();
       }
     }
-    function getMergedFrameSpecs(_ref160) {
-      var childFrameSpecs = _ref160.frames, parentFrameSpec = _ref160.frameSpec;
+    function getMergedFrameSpecs(_ref150) {
+      var childFrameSpecs = _ref150.frames, parentFrameSpec = _ref150.frameSpec;
       if (!parentFrameSpec) {
         return childFrameSpecs;
       }
@@ -29145,8 +29039,8 @@
       var transformedResults = results.map(function(result) {
         var transformedResult = _extends({}, result);
         var types = [ 'passes', 'violations', 'incomplete', 'inapplicable' ];
-        for (var _i43 = 0, _types2 = types; _i43 < _types2.length; _i43++) {
-          var type2 = _types2[_i43];
+        for (var _i43 = 0, _types = types; _i43 < _types.length; _i43++) {
+          var type2 = _types[_i43];
           transformedResult[type2] = node_serializer_default.mapRawNodeResults(transformedResult[type2]);
         }
         return transformedResult;
@@ -29862,7 +29756,8 @@
               noIdShadow: 'ARIA attribute element ID does not exist on the page or is a descendant of a different shadow DOM tree: ${data.needsReview}',
               ariaCurrent: 'ARIA attribute value is invalid and will be treated as "aria-current=true": ${data.needsReview}',
               idrefs: 'Unable to determine if ARIA attribute element ID exists on the page: ${data.needsReview}',
-              empty: 'ARIA attribute value is ignored while empty: ${data.needsReview}'
+              empty: 'ARIA attribute value is ignored while empty: ${data.needsReview}',
+              controlsWithinPopup: 'Unable to determine if aria-controls referenced ID exists on the page while using aria-haspopup: ${data.needsReview}'
             }
           }
         },
@@ -30373,7 +30268,8 @@
             fail: 'Target has insufficient space to its closest neighbors. Safe clickable space has a diameter of ${data.closestOffset}px instead of at least ${data.minOffset}px.',
             incomplete: {
               default: 'Element with negative tabindex has insufficient space to its closest neighbors. Safe clickable space has a diameter of ${data.closestOffset}px instead of at least ${data.minOffset}px. Is this a target?',
-              nonTabbableNeighbor: 'Target has insufficient space to its closest neighbors. Safe clickable space has a diameter of ${data.closestOffset}px instead of at least ${data.minOffset}px. Is the neighbor a target?'
+              nonTabbableNeighbor: 'Target has insufficient space to its closest neighbors. Safe clickable space has a diameter of ${data.closestOffset}px instead of at least ${data.minOffset}px. Is the neighbor a target?',
+              tooManyRects: 'Could not get the target size because there are too many overlapping elements'
             }
           }
         },
diff --git a/node_modules/axe-core/axe.min.js b/node_modules/axe-core/axe.min.js
index 87393bb95..421a05a40 100644
--- a/node_modules/axe-core/axe.min.js
+++ b/node_modules/axe-core/axe.min.js
@@ -1,4 +1,4 @@
-/*! axe v4.9.0
+/*! axe v4.9.1
  * Copyright (c) 2015 - 2024 Deque Systems, Inc.
  *
  * Your use of this Source Code Form is subject to the terms of the Mozilla Public
@@ -9,4 +9,4 @@
  * distribute or in any file that contains substantial portions of this source
  * code.
  */
-!function O(window){var q=window,document=window.document;function te(e){return(te="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e})(e)}var axe=axe||{};function _(e){this.name="SupportError",this.cause=e.cause,this.message="`".concat(e.cause,"` - feature unsupported in your environment."),e.ruleId&&(this.ruleId=e.ruleId,this.message+=" Skipping ".concat(this.ruleId," rule.")),this.stack=(new Error).stack}axe.version="4.9.0","function"==typeof define&&define.amd&&define("axe-core",[],function(){return axe}),"object"===("undefined"==typeof module?"undefined":te(module))&&module.exports&&"function"==typeof O.toString&&(axe.source="("+O.toString()+')(typeof window === "object" ? window : this);',module.exports=axe),"function"==typeof window.getComputedStyle&&(window.axe=axe),(_.prototype=Object.create(Error.prototype)).constructor=_;var M=["node"],I=["relatedNodes"],P=["node"],B=["variant"],j=["matches"],L=["chromium"],z=["noImplicit"],V=["noPresentational"],H=["precision","format","inGamut"],$=["space"],U=["algorithm"],W=["method"],G=["maxDeltaE","deltaEMethod","steps","maxSteps"],Y=["node"],K=["environmentData"],X=["environmentData"],Z=["environmentData"],J=["environmentData"],Q=["environmentData"];function ee(e){return ge(e)||me(e)||De(e)||he()}function ne(e,t,n){re(e,t),t.set(e,n)}function re(e,t){if(t.has(e))throw new TypeError("Cannot initialize the same private elements twice on an object")}function ae(e,t,n){t=ie(t,e);if(t.set)t.set.call(e,n);else{if(!t.writable)throw new TypeError("attempted to set read only private field");t.value=n}}function oe(e,t){t=ie(t,e);return t.get?t.get.call(e):t.value}function ie(e,t){return e.get(se(e,t))}function se(e,t,n){if("function"==typeof e?e===t:e.has(t))return arguments.length<3?t:n;throw new TypeError("Private element is not present on this object")}function 
le(e,t,n){t=de(t);var r=e,t=ce()?Reflect.construct(t,n||[],de(e).constructor):t.apply(e,n);if(t&&("object"===te(t)||"function"==typeof t))return t;if(void 0!==t)throw new TypeError("Derived constructors may only return object or undefined");return ue(r)}function ue(e){if(void 0===e)throw new ReferenceError("this hasn't been initialised - super() hasn't been called");return e}function ce(){try{var e=!Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){}))}catch(e){}return(ce=function(){return!!e})()}function de(e){return(de=Object.setPrototypeOf?Object.getPrototypeOf.bind():function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}function pe(e,t){if("function"!=typeof t&&null!==t)throw new TypeError("Super expression must either be null or a function");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),Object.defineProperty(e,"prototype",{writable:!1}),t&&fe(e,t)}function fe(e,t){return(fe=Object.setPrototypeOf?Object.setPrototypeOf.bind():function(e,t){return e.__proto__=t,e})(e,t)}function b(e,t){if(null==e)return{};var n,r=function(e,t){if(null==e)return{};var n,r,a={},o=Object.keys(e);for(r=0;r=e.length?{done:!0}:{done:!1,value:e[o++]}},e:function(e){throw e},f:t};throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}function De(e,t){var n;if(e)return"string"==typeof e?xe(e,t):"Map"===(n="Object"===(n=Object.prototype.toString.call(e).slice(8,-1))&&e.constructor?e.constructor.name:n)||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?xe(e,t):void 0}function xe(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n"}}}),sn=e(function(e,t){"use strict";var n=on(),r=/[\n\r\u2028\u2029]/g;t.exports=function(e){e=n(e);return 
e=(e=100":!0,"?":!0,"@":!0,"[":!0,"\\":!0,"]":!0,"^":!0,"`":!0,"{":!0,"|":!0,"}":!0,"~":!0},i.strReplacementsRev={"\n":"\\n","\r":"\\r","\t":"\\t","\f":"\\f","\v":"\\v"},i.singleQuoteEscapeChars={n:"\n",r:"\r",t:"\t",f:"\f","\\":"\\","'":"'"},i.doubleQuotesEscapeChars={n:"\n",r:"\r",t:"\t",f:"\f","\\":"\\",'"':'"'}}),yn=e(function(e){"use strict";Object.defineProperty(e,"__esModule",{value:!0});var b=vn();e.parseCssSelector=function(o,i,s,l,a,u){var c=o.length,d="";function p(e,t){var n="";for(i++,d=o.charAt(i);i>0},ToUint32:function(e){return e>>>0}}),f=Math.LN2,m=Math.abs,h=Math.floor,g=Math.log,b=Math.min,v=Math.pow,P=Math.round;function a(e,t,n){return er)throw new RangeError("Array too large for polyfill");for(var e=0;e>t}function A(e,t){t=32-t;return e<>>t}function j(e){return[255&e]}function L(e){return C(e[0],8)}function q(e){return[255&e]}function N(e){return A(e[0],8)}function z(e){return[(e=P(Number(e)))<0?0:255>8&255,255&e]}function H(e){return C(e[0]<<8|e[1],16)}function $(e){return[e>>8&255,255&e]}function U(e){return A(e[0]<<8|e[1],16)}function W(e){return[e>>24&255,e>>16&255,e>>8&255,255&e]}function G(e){return C(e[0]<<24|e[1]<<16|e[2]<<8|e[3],32)}function Y(e){return[e>>24&255,e>>16&255,e>>8&255,255&e]}function K(e){return A(e[0]<<24|e[1]<<16|e[2]<<8|e[3],32)}function k(e,t,n){var r,a,o,i,s,l,u,c=(1<=v(2,1-c)?(a=b(h(g(e)/f),1023),2<=(o=d(e/v(2,a)*v(2,n)))/v(2,n)&&(a+=1,o=1),c>=1;return u.reverse(),l=u.join(""),o=(1<this.buffer.byteLength)throw new RangeError("byteOffset out of range");if(this.byteOffset%this.BYTES_PER_ELEMENT)throw new RangeError("ArrayBuffer length minus the byteOffset is not a multiple of the element size.");if(arguments.length<3){if(this.byteLength=this.buffer.byteLength-this.byteOffset,this.byteLength%this.BYTES_PER_ELEMENT)throw new RangeError("length of buffer minus byteOffset not a multiple of the element size");this.length=this.byteLength/this.BYTES_PER_ELEMENT}else 
this.length=p.ToUint32(n),this.byteLength=this.length*this.BYTES_PER_ELEMENT;if(this.byteOffset+this.byteLength>this.buffer.byteLength)throw new RangeError("byteOffset and length reference an area beyond the end of the buffer")}else for(this.length=p.ToUint32((a=e).length),this.byteLength=this.length*this.BYTES_PER_ELEMENT,this.buffer=new R(this.byteLength),o=this.byteOffset=0;o=this.length)){for(var t=[],n=0,r=this.byteOffset+e*this.BYTES_PER_ELEMENT;nthis.length)throw new RangeError("Offset plus length of array is out of range");if(u=this.byteOffset+a*this.BYTES_PER_ELEMENT,c=n.length*this.BYTES_PER_ELEMENT,n.buffer===this.buffer){for(d=[],i=0,s=n.byteOffset;ithis.length)throw new RangeError("Offset plus length of array is out of range");for(i=0;ithis.buffer.byteLength)throw new RangeError("byteOffset out of range");if(this.byteLength=arguments.length<3?this.buffer.byteLength-this.byteOffset:p.ToUint32(n),this.byteOffset+this.byteLength>this.buffer.byteLength)throw new RangeError("byteOffset and length reference an area beyond the end of the buffer");F(this)}function M(o){return function(e,t){if((e=p.ToUint32(e))+o.BYTES_PER_ELEMENT>this.byteLength)throw new RangeError("Array index out of range");e+=this.byteOffset;for(var n=new s.Uint8Array(this.buffer,e,o.BYTES_PER_ELEMENT),r=[],a=0;athis.byteLength)throw new RangeError("Array index out of range");for(var t=new i([t]),r=new s.Uint8Array(t.buffer),a=[],o=0;oa;)!i(r,n=t[a++])||~l(o,n)||c(o,n);return o}}),Pr=e(function(e,t){"use strict";t.exports=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"]}),Br=e(function(e,t){"use strict";var n=Ir(),r=Pr();t.exports=Object.keys||function(e){return n(e,r)}}),jr=e(function(e,t){"use strict";function n(l){return function(e){for(var t,n=p(e),r=d(n),a=h&&null===c(n),o=r.length,i=0,s=[];i":">",'"':""","'":"'","/":"/"},n=e?/[&<>"'\/]/g:/&(?!#?\w+;)|<|>|"|'|\//g;return function(e){return 
e?e.toString().replace(n,function(e){return t[e]||e}):""}},void 0!==t&&t.exports?t.exports=s:"function"==typeof define&&define.amd?define(function(){return s}):globalThis.doT=s;var l={append:{start:"'+(",end:")+'",startencode:"'+encodeHTML("},split:{start:"';out+=(",end:");out+='",startencode:"';out+=encodeHTML("}},u=/$^/;function c(e){return e.replace(/\\('|\\)/g,"$1").replace(/[\r\t\n]/g," ")}s.template=function(e,t,n){var r,a,o=(t=t||s.templateSettings).append?l.append:l.split,i=0,n=t.use||t.define?function n(a,e,o){return("string"==typeof e?e:e.toString()).replace(a.define||u,function(e,r,t,n){return(r=0===r.indexOf("def.")?r.substring(4):r)in o||(":"===t?(a.defineParams&&n.replace(a.defineParams,function(e,t,n){o[r]={arg:t,text:n}}),r in o||(o[r]=n)):new Function("def","def['"+r+"']="+n)(o)),""}).replace(a.use||u,function(e,t){return a.useParams&&(t=t.replace(a.useParams,function(e,t,n,r){var a;if(o[n]&&o[n].arg&&r)return a=(n+":"+r).replace(/'|\\/g,"_"),o.__exp=o.__exp||{},o.__exp[a]=o[n].text.replace(new RegExp("(^|[^\\w$])"+o[n].arg+"([^\\w$])","g"),"$1"+r+"$2"),t+"def.__exp['"+a+"']"})),(t=new Function("def","return "+t)(o))&&n(a,t,o)})}(t,e,n||{}):e,n=("var out='"+(t.strip?n.replace(/(^|\r|\n)\t* +| +\t*(\r|\n|$)/g," ").replace(/\r|\n|\t|\/\*[\s\S]*?\*\//g,""):n).replace(/'|\\/g,"\\$&").replace(t.interpolate||u,function(e,t){return o.start+c(t)+o.end}).replace(t.encode||u,function(e,t){return r=!0,o.startencode+c(t)+o.end}).replace(t.conditional||u,function(e,t,n){return t?n?"';}else if("+c(n)+"){out+='":"';}else{out+='":n?"';if("+c(n)+"){out+='":"';}out+='"}).replace(t.iterate||u,function(e,t,n,r){return t?(i+=1,a=r||"i"+i,t=c(t),"';var arr"+i+"="+t+";if(arr"+i+"){var 
"+n+","+a+"=-1,l"+i+"=arr"+i+".length-1;while("+a+"",sameOrigin:""},f=([{name:"NA",value:"inapplicable",priority:0,group:"inapplicable"},{name:"PASS",value:"passed",priority:1,group:"passes"},{name:"CANTTELL",value:"cantTell",priority:2,group:"incomplete"},{name:"FAIL",value:"failed",priority:3,group:"violations"}].forEach(function(e){var t=e.name,n=e.value,r=e.priority,e=e.group;Hr[t]=n,Hr[t+"_PRIO"]=r,Hr[t+"_GROUP"]=e,Hr.results[r]=n,Hr.resultGroups[r]=e,Hr.resultGroupMap[n]=e}),Object.freeze(Hr.results),Object.freeze(Hr.resultGroups),Object.freeze(Hr.resultGroupMap),Object.freeze(Hr),Hr),$r=function(){"object"===("undefined"==typeof console?"undefined":te(console))&&console.log&&Function.prototype.apply.call(console.log,console,arguments)},Ur=/[\t\r\n\f]/g;function Wr(){be(this,Wr),this.parent=void 0}ye(Wr,[{key:"props",get:function(){throw new Error('VirtualNode class must have a "props" object consisting of "nodeType" and "nodeName" properties')}},{key:"attrNames",get:function(){throw new Error('VirtualNode class must have an "attrNames" property')}},{key:"attr",value:function(){throw new Error('VirtualNode class must have an "attr" function')}},{key:"hasAttr",value:function(){throw new Error('VirtualNode class must have a "hasAttr" function')}},{key:"hasClass",value:function(e){var t=this.attr("class");return!!t&&(e=" "+e+" ",0<=(" "+t+" ").replace(Ur," ").indexOf(e))}}]);var p=Wr,Gr={},Yr=(Se(Gr,{DqElement:function(){return Ia},aggregate:function(){return Yr},aggregateChecks:function(){return ea},aggregateNodeResults:function(){return na},aggregateResult:function(){return aa},areStylesSet:function(){return oa},assert:function(){return E},checkHelper:function(){return Pa},clone:function(){return Ba},closest:function(){return Ga},collectResultsFromFrames:function(){return Vo},contains:function(){return Ho},convertSelector:function(){return Ua},cssParser:function(){return ja},deepMerge:function(){return $o},escapeSelector:function(){return 
m},extendMetaData:function(){return Uo},filterHtmlAttrs:function(){return Gf},finalizeRuleResult:function(){return ta},findBy:function(){return Lo},getAllChecks:function(){return jo},getAncestry:function(){return Ra},getBaseLang:function(){return qp},getCheckMessage:function(){return Kp},getCheckOption:function(){return Xp},getEnvironmentData:function(){return Zp},getFlattenedTree:function(){return Bp},getFrameContexts:function(){return cf},getFriendlyUriEnd:function(){return ca},getNodeAttributes:function(){return da},getNodeFromTree:function(){return g},getPreloadConfig:function(){return zf},getRootNode:function(){return Ko},getRule:function(){return df},getScroll:function(){return ff},getScrollState:function(){return mf},getSelector:function(){return ka},getSelectorData:function(){return Fa},getShadowSelector:function(){return ha},getStandards:function(){return hf},getStyleSheetFactory:function(){return bf},getXpath:function(){return Sa},injectStyle:function(){return vf},isHidden:function(){return yf},isHtmlElement:function(){return wf},isNodeInContext:function(){return Df},isShadowRoot:function(){return Go},isValidLang:function(){return nm},isXHTML:function(){return ma},matchAncestry:function(){return Ef},matches:function(){return La},matchesExpression:function(){return Wa},matchesSelector:function(){return pa},memoize:function(){return n},mergeResults:function(){return zo},nodeLookup:function(){return s},nodeSerializer:function(){return Bo},nodeSorter:function(){return Ff},parseCrossOriginStylesheet:function(){return Tf},parseSameOriginStylesheet:function(){return Nf},parseStylesheet:function(){return kf},performanceTimer:function(){return l},pollyfillElementsFromPoint:function(){return Sf},preload:function(){return Lf},preloadCssom:function(){return If},preloadMedia:function(){return jf},processMessage:function(){return Yp},publishMetaData:function(){return Vf},querySelectorAll:function(){return $f},querySelectorAllFilter:function(){return 
Mf},queue:function(){return Qa},respondable:function(){return Ro},ruleShouldRun:function(){return Wf},select:function(){return Kf},sendCommandToFrame:function(){return Oo},setScrollState:function(){return Xf},shadowSelect:function(){return Zf},shadowSelectAll:function(){return Jf},shouldPreload:function(){return qf},toArray:function(){return ia},tokenList:function(){return Rp},uniqueArray:function(){return Of},uuid:function(){return po},validInputTypes:function(){return Qf},validLangs:function(){return tm}}),function(t,e,n){return e=e.slice(),n&&e.push(n),n=e.map(function(e){return t.indexOf(e)}).sort(),t[n.pop()]}),Kr=f.CANTTELL_PRIO,Xr=f.FAIL_PRIO,Zr=[],Jr=(Zr[f.PASS_PRIO]=!0,Zr[f.CANTTELL_PRIO]=null,Zr[f.FAIL_PRIO]=!1,["any","all","none"]);function Qr(n,r){Jr.reduce(function(e,t){return e[t]=(n[t]||[]).map(function(e){return r(e,t)}),e},{})}var ea=function(e){var n=Object.assign({},e),r=(Qr(n,function(e,t){var n=void 0===e.result?-1:Zr.indexOf(e.result);e.priority=-1!==n?n:f.CANTTELL_PRIO,"none"===t&&(e.priority===f.PASS_PRIO?e.priority=f.FAIL_PRIO:e.priority===f.FAIL_PRIO&&(e.priority=f.PASS_PRIO))}),{all:n.all.reduce(function(e,t){return Math.max(e,t.priority)},0),none:n.none.reduce(function(e,t){return Math.max(e,t.priority)},0),any:n.any.reduce(function(e,t){return Math.min(e,t.priority)},4)%4}),a=(n.priority=Math.max(r.all,r.none,r.any),[]);return Jr.forEach(function(t){n[t]=n[t].filter(function(e){return e.priority===n.priority&&e.priority===r[t]}),n[t].forEach(function(e){return a.push(e.impact)})}),[Kr,Xr].includes(n.priority)?n.impact=Yr(f.impact,a):n.impact=null,Qr(n,function(e){delete e.result,delete e.priority}),n.result=f.results[n.priority],delete n.priority,n};function ta(t){var n=axe._audit.rules.find(function(e){return e.id===t.id});return n&&n.impact&&t.nodes.forEach(function(t){["any","all","none"].forEach(function(e){(t[e]||[]).forEach(function(e){e.impact=n.impact})})}),Object.assign(t,na(t.nodes)),delete t.nodes,t}var na=function(e){var 
n={},t=((e=e.map(function(e){if(e.any&&e.all&&e.none)return ea(e);if(Array.isArray(e.node))return ta(e);throw new TypeError("Invalid Result type")}))&&e.length?(t=e.map(function(e){return e.result}),n.result=Yr(f.results,t,n.result)):n.result="inapplicable",f.resultGroups.forEach(function(e){return n[e]=[]}),e.forEach(function(e){var t=f.resultGroupMap[e.result];n[t].push(e)}),f.FAIL_GROUP);return 0===n[t].length&&(t=f.CANTTELL_GROUP),0=t.length/2}(s)?la(s):void 0},da=function(e){return(e.attributes instanceof window.NamedNodeMap?e:e.cloneNode(!1)).attributes},pa=function(e,t){return!!e[ua=ua&&e[ua]?ua:function(e){for(var t,n=["matches","matchesSelector","mozMatchesSelector","webkitMatchesSelector","msMatchesSelector"],r=n.length,a=0;a "+r:i,a=a?a.filter(function(e){return pa(e,r)}):Array.from(n.querySelectorAll(r)),e=e.parentElement}while((1 ")?":root"+r.substring(r.indexOf(" > ")):":root"}function ka(e,t){return ha(Na,e,t)}function Ta(e){var t,n=e.nodeName.toLowerCase(),r=e.parentElement;return r?(t="","head"!==n&&"body"!==n&&1 "+n+t):n}function Ra(e,t){return ha(Ta,e,t)}var Sa=function(e){return function e(t,n){var r,a,o,i;if(!t)return[];if(!n&&9===t.nodeType)return n=[{str:"html"}];if(n=n||[],t.parentNode&&t.parentNode!==t&&(n=e(t.parentNode,n)),t.previousSibling){for(a=1,r=t.previousSibling;1===r.nodeType&&r.nodeName===t.nodeName&&a++,r=r.previousSibling;);1===a&&(a=null)}else if(t.nextSibling)for(r=t.nextSibling;r=1===r.nodeType&&r.nodeName===t.nodeName?(a=1,null):(a=null,r.previousSibling););return 1===t.nodeType&&((o={}).str=t.nodeName.toLowerCase(),(i=t.getAttribute&&m(t.getAttribute("id")))&&1===t.ownerDocument.querySelectorAll("#"+i).length&&(o.id=t.getAttribute("id")),1(n=n||300)&&(n=r.indexOf(">"),r=r.substring(0,n+1)),r):"")}Ma.prototype={get selector(){return this.spec.selector||[ka(this.element,this._options)]},get ancestry(){return this.spec.ancestry||[Ra(this.element)]},get xpath(){return this.spec.xpath||[Sa(this.element)]},get element(){return 
this._element},toJSON:function(){var e={selector:this.selector,source:this.source,xpath:this.xpath,ancestry:this.ancestry,nodeIndexes:this.nodeIndexes,fromFrame:this.fromFrame};return this._includeElementInJson&&(e.element=this._element),e}},Ma.fromFrame=function(e,t,n){e=Ma.mergeSpecs(e,n);return new Ma(n.element,t,e)},Ma.mergeSpecs=function(e,t){return h({},e,{selector:[].concat(D(t.selector),D(e.selector)),ancestry:[].concat(D(t.ancestry),D(e.ancestry)),xpath:[].concat(D(t.xpath),D(e.xpath)),nodeIndexes:[].concat(D(t.nodeIndexes),D(e.nodeIndexes)),fromFrame:!0})},Ma.setRunOptions=function(e){var t=e.elementRef,e=e.absolutePaths;v.set(_a,{elementRef:t,absolutePaths:e})};var Ia=Ma,Pa=function(t,e,n,r){return{isAsync:!1,async:function(){return this.isAsync=!0,function(e){e instanceof Error==!1?(t.result=e,n(t)):r(e)}},data:function(e){t.data=e},relatedNodes:function(e){window.Node&&(e=e instanceof window.Node||e instanceof p?[e]:ia(e),t.relatedNodes=[],e.forEach(function(e){(e=e instanceof p?e.actualNode:e)instanceof window.Node&&(e=new Ia(e),t.relatedNodes.push(e))}))}}};function Ba(e){return function t(e,n){var r;if(null===e||"object"!==te(e))return e;if(null!=(r=window)&&r.Node&&e instanceof window.Node||null!=(r=window)&&r.HTMLCollection&&e instanceof window.HTMLCollection||"nodeName"in e&&"nodeType"in e&&"ownerDocument"in e)return e;if(n.has(e))return n.get(e);{var a;if(Array.isArray(e))return a=[],n.set(e,a),e.forEach(function(e){a.push(t(e,n))}),a}var o={};n.set(e,o);for(var i in e)o[i]=t(e[i],n);return o}(e,new Map)}(a=new(_e(Dn()).CssSelectorParser)).registerSelectorPseudos("not"),a.registerSelectorPseudos("is"),a.registerNestingOperators(">"),a.registerAttrEqualityMods("^","$","*","~");var ja=a;function La(t,e){return Ua(e).some(function(e){return Wa(t,e)})}function qa(e,t){return i=t,1===(o=e).props.nodeType&&("*"===i.tag||o.props.nodeName===i.tag)&&(a=e,!(o=t).classes||o.classes.every(function(e){return 
a.hasClass(e.value)}))&&(r=e,!(i=t).attributes||i.attributes.every(function(e){var t=r.attr(e.key);return null!==t&&e.test(t)}))&&(o=e,!(i=t).id||o.props.id===i.id)&&(n=e,!((o=t).pseudos&&!o.pseudos.every(function(e){if("not"===e.name)return!e.expressions.some(function(e){return Wa(n,e)});if("is"===e.name)return e.expressions.some(function(e){return Wa(n,e)});throw new Error("the pseudo selector "+e.name+" has not yet been implemented")})));var n,r,a,o,i}za=/(?=[\-\[\]{}()*+?.\\\^$|,#\s])/g;var za,Va=function(e){return e.replace(za,"\\")},Ha=/\\/g;function $a(e){return e.map(function(e){for(var t=[],n=e.rule;n;)t.push({tag:n.tagName?n.tagName.toLowerCase():"*",combinator:n.nestingOperator||" ",id:n.id,attributes:function(e){if(e)return e.map(function(e){var t,n,r=e.name.replace(Ha,""),a=(e.value||"").replace(Ha,"");switch(e.operator){case"^=":n=new RegExp("^"+Va(a));break;case"$=":n=new RegExp(Va(a)+"$");break;case"~=":n=new RegExp("(^|\\s)"+Va(a)+"(\\s|$)");break;case"|=":n=new RegExp("^"+Va(a)+"(-|$)");break;case"=":t=function(e){return a===e};break;case"*=":t=function(e){return e&&e.includes(a)};break;case"!=":t=function(e){return a!==e};break;default:t=function(e){return null!==e}}return""===a&&/^[*$^]=$/.test(e.operator)&&(t=function(){return!1}),{key:r,value:a,type:void 0===e.value?"attrExist":"attrValue",test:t=t||function(e){return e&&n.test(e)}}})}(n.attrs),classes:function(e){if(e)return e.map(function(e){return{value:e=e.replace(Ha,""),regexp:new RegExp("(^|\\s)"+Va(e)+"(\\s|$)")}})}(n.classNames),pseudos:function(e){if(e)return e.map(function(e){var t;return["is","not"].includes(e.name)&&(t=$a(t=(t=e.value).selectors||[t])),{name:e.name,expressions:t,value:e.value}})}(n.pseudos)}),n=n.rule;return t})}function Ua(e){e=ja.parse(e);return $a(e.selectors||[e])}function Wa(e,t,n){return function e(t,n,r,a){if(!t)return!1;for(var o=Array.isArray(n)?n[r]:n,i=qa(t,o);!i&&a&&t.parent;)i=qa(t=t.parent,o);if(0"].includes(o.combinator))throw new 
Error("axe.utils.matchesExpression does not support the combinator: "+o.combinator);i=i&&e(t.parent,n,r-1," "===o.combinator)}return i}(e,t,t.length-1,n)}var Ga=function(e,t){for(;e;){if(La(e,t))return e;if(void 0===e.parent)throw new TypeError("Cannot resolve parent for non-DOM nodes");e=e.parent}return null};function Ya(){}function Ka(e){if("function"!=typeof e)throw new TypeError("Queue methods require functions as arguments")}for(var Xa,Za,Ja,Qa=function(){function t(e){r=e,setTimeout(function(){null!=r&&$r("Uncaught error (of queue)",r)},1)}var r,a=[],n=0,o=0,i=Ya,s=!1,l=t;function u(e){return i=Ya,l(e),a}function c(){for(var e=a.length;n>>((3&t)<<3)&255;return Ja}),"function"==typeof window.Buffer?window.Buffer:Array),r=[],no={},ro=0;ro<256;ro++)r[ro]=(ro+256).toString(16).substr(1),no[r[ro]]=ro;function ao(e,t){t=t||0;return r[e[t++]]+r[e[t++]]+r[e[t++]]+r[e[t++]]+"-"+r[e[t++]]+r[e[t++]]+"-"+r[e[t++]]+r[e[t++]]+"-"+r[e[t++]]+r[e[t++]]+"-"+r[e[t++]]+r[e[t++]]+r[e[t++]]+r[e[t++]]+r[e[t++]]+r[e[+t]]}var oo=[1|(a=Xa())[0],a[1],a[2],a[3],a[4],a[5]],io=16383&(a[6]<<8|a[7]),so=0,lo=0;function uo(e,t,n){var r=t&&n||0,a=t||[],n=null!=(e=e||{}).clockseq?e.clockseq:io,o=null!=e.msecs?e.msecs:(new Date).getTime(),i=null!=e.nsecs?e.nsecs:lo+1,s=o-so+(i-lo)/1e4;if(s<0&&null==e.clockseq&&(n=n+1&16383),1e4<=(i=(s<0||so>>24&255,a[r++]=s>>>16&255,a[r++]=s>>>8&255,a[r++]=255&s,o/4294967296*1e4&268435455),l=(a[r++]=i>>>8&255,a[r++]=255&i,a[r++]=i>>>24&15|16,a[r++]=i>>>16&255,a[r++]=n>>>8|128,a[r++]=255&n,e.node||oo),u=0;u<6;u++)a[r+u]=l[u];return t||ao(a)}function co(e,t,n){var r=t&&n||0,a=("string"==typeof e&&(t="binary"==e?new to(16):null,e=null),(e=e||{}).random||(e.rng||Xa)());if(a[6]=15&a[6]|64,a[8]=63&a[8]|128,t)for(var o=0;o<16;o++)t[r+o]=a[o];return t||ao(a)}(a=co).v1=uo,a.v4=co,a.parse=function(e,t,n){var r=t&&n||0,a=0;for(t=t||[],e.toLowerCase().replace(/[0-9a-f]{2}/g,function(e){a<16&&(t[r+a++]=no[e])});a<16;)t[r+a++]=0;return 
t},a.unparse=ao,a.BufferClass=to,axe._uuid=uo();var po=co,fo=Object.freeze(["EvalError","RangeError","ReferenceError","SyntaxError","TypeError","URIError"]);function mo(e){var t,n,r,a;try{t=JSON.parse(e)}catch(e){return}if(null!==(e=t)&&"object"===te(e)&&"string"==typeof e.channelId&&e.source===ho())return n=(e=t).topic,r=e.channelId,a=e.messageId,e=e.keepalive,{topic:n,message:"object"===te(t.error)?function(e){var t=e.message||"Unknown error occurred",n=fo.includes(e.name)?e.name:"Error",n=window[n]||Error;e.stack&&(t+="\n"+e.stack.replace(e.message,""));return new n(t)}(t.error):t.payload,messageId:a,channelId:r,keepalive:!!e}}function ho(){var e="axeAPI",t="";return(e=void 0!==axe&&axe._audit&&axe._audit.application?axe._audit.application:e)+"."+(t=void 0!==axe?axe.version:t)}function go(e){vo(e),E(window.parent===e,"Source of the response must be the parent window.")}function bo(e){vo(e),E(e.parent===window,"Respondable target must be a frame in the current window")}function vo(e){E(window!==e,"Messages can not be sent to the same window.")}var yo={},wo=[];function Do(){var e="".concat(co(),":").concat(co());return wo.includes(e)?Do():(wo.push(e),e)}function xo(n,e,t,r){var a,o,i,s,l;return(t?go:bo)(n),e.message instanceof Error&&!t?(axe.log(e.message),!1):(o=h({messageId:Do()},e),l=o.topic,i=o.channelId,s=o.message,i={channelId:i,topic:l,messageId:o.messageId,keepalive:!!o.keepalive,source:ho()},s instanceof Error?i.error={name:s.name,message:s.message,stack:s.stack}:i.payload=s,a=JSON.stringify(i),!(!(l=axe._audit.allowedOrigins)||!l.length||("function"==typeof r&&function(e,t,n){var r=!(2(0|t.left)&&(0|e.top)<(0|t.bottom)&&(0|e.bottom)>(0|t.top)}var ti=n(function(e){var t=[];return e?("hidden"===e.getComputedStylePropertyValue("overflow")&&t.push(e),t.concat(ti(e.parent))):t}),ni=ti,ri=/rect\s*\(([0-9]+)px,?\s*([0-9]+)px,?\s*([0-9]+)px,?\s*([0-9]+)px\s*\)/,ai=/(\w+)\((\d+)/;function 
oi(e){return["style","script","noscript","template"].includes(e.props.nodeName)}function ii(e){return"area"!==e.props.nodeName&&"none"===e.getComputedStylePropertyValue("display")}function si(e){return!(1=t)return!0;return!1}},Fi=[ci,di,pi,fi,Ei];function Ci(e){return e=s(e).vNode,Ai(e)}var Ai=n(function(t,n){return t.actualNode&&"area"===t.props.nodeName?!mi(t,Ai):!(bi(t,{skipAncestors:!0,isAncestor:n})||t.actualNode&&Fi.some(function(e){return e(t,{isAncestor:n})}))&&(!t.parent||Ai(t.parent,!0))});function Ni(e,t){var n=Math.min(e.top,t.top),r=Math.max(e.right,t.right),a=Math.max(e.bottom,t.bottom),e=Math.min(e.left,t.left);return new window.DOMRect(e,n,r-e,a-n)}function ki(e,t){var n=e.x,e=e.y,r=t.top,a=t.right,o=t.bottom,t=t.left;return r<=e&&n<=a&&e<=o&&t<=n}var Ti={};function Ri(e,t){var n=Math.max(e.left,t.left),r=Math.min(e.right,t.right),a=Math.max(e.top,t.top),e=Math.min(e.bottom,t.bottom);return r<=n||e<=a?null:new window.DOMRect(n,a,r-n,e-a)}function Si(e){var t=e.left,n=e.top,r=e.width,e=e.height;return new window.DOMPoint(t+r/2,n+e/2)}Se(Ti,{getBoundingRect:function(){return Ni},getIntersectionRect:function(){return Ri},getOffset:function(){return Mi},getRectCenter:function(){return Si},hasVisualOverlap:function(){return Pi},isPointInRect:function(){return ki},rectHasMinimumSize:function(){return _i},rectsOverlap:function(){return ei},splitRects:function(){return Bi}});var Oi=.05;function _i(e,t){var n=t.width,t=t.height;return e<=n+Oi&&e<=t+Oi}function Mi(e,t){var n=2t.right?t.right:e.x;t=e.yt.bottom?t.bottom:e.y;return{x:n,y:t}}(i,u)),s=Math.min(s,c)}}catch(e){l.e(e)}finally{l.f()}return _i(2*n,ys(t))?s:(r=Ii(i,Si(a.reduce(Ni)))-n,Math.max(0,Math.min(s,r)))}function Ii(e,t){return Math.hypot(e.x-t.x,e.y-t.y)}function Pi(e,t){var 
n=e.boundingClientRect,r=t.boundingClientRect;return!(n.left>=r.right||n.right<=r.left||n.top>=r.bottom||n.bottom<=r.top)&&0t.top,s=rt.left,l=[];ji(t.top,n,a)&&s&&l.push({top:n,left:r,bottom:t.top,right:o});ji(t.right,r,o)&&i&&l.push({top:n,left:t.right,bottom:a,right:o});ji(t.bottom,n,a)&&s&&l.push({top:t.bottom,right:o,bottom:a,left:r});ji(t.left,r,o)&&i&&l.push({top:n,left:r,bottom:a,right:t.left});if(0===l.length){if(function(e,t){return e.top>=t.top&&e.left>=t.left&&e.bottom<=t.bottom&&e.right<=t.right}(e,t))return[];l.push(e)}return l.map(Li)}(t,n))},[])).length)return{v:[]}}var i;for(n.s();!(r=n.n()).done;)if(i=o())return i.v}catch(e){n.e(e)}finally{n.f()}return a}var ji=function(e,t,n){return te._stackingOrder[r].stackLevel)return 1;if(t._stackingOrder[r].stackLevel=Math.floor(t)&&s=Math.floor(n)})}),o=e.container;return o&&(a=fs(o._grid,o.boundingClientRect,!0).concat(a)),a=r?a:a.sort(ds).map(function(e){return e.actualNode}).concat(document.documentElement).filter(function(e,t,n){return n.indexOf(e)===t})}var ms=function(e){Ui();var t=(e=g(e))._grid;return t?fs(t,e.boundingClientRect):[]},hs=function(e){return $f(e,"*").filter(function(e){var t=e.isFocusable,e=e.actualNode.getAttribute("tabindex");return(e=e&&!isNaN(parseInt(e,10))?parseInt(e):null)?t&&0<=e:t})},gs=function(e){var t=s(e).vNode;if(t&&!is(t))switch(t.props.nodeName){case"a":case"area":if(t.hasAttr("href"))return!0;break;case"input":return"hidden"!==t.props.type;case"textarea":case"select":case"summary":case"button":return!0;case"details":return!$f(t,"summary").length}return!1};function y(e){var e=s(e).vNode;return 1===e.props.nodeType&&!(is(e)||!gs(e)&&(!(e=e.attr("tabindex"))||isNaN(parseInt(e,10))))}function bs(e){e=s(e).vNode;return 1===e.props.nodeType&&!(parseInt(e.attr("tabindex",10))<=-1)&&y(e)}var vs=n(function(t){var e=t.boundingClientRect,n=Qi(t).filter(function(e){return 
Pi(t,e)&&"none"!==e.getComputedStylePropertyValue("pointer-events")&&(e=e,!(t.actualNode.contains(e.actualNode)&&!bs(e)))});return n.length?(n=n.map(function(e){return e.boundingClientRect}),Bi(e,n)):[e]}),ys=n(function(e,t){return function(e,r){return e.reduce(function(e,t){var n=_i(r,e);return n!==_i(r,t)?n?e:t:(n=e.width*e.height,t.width*t.height?@\[\]^_`{|}~\xb1]/g}function Pl(){return/[\uDB80-\uDBBF][\uDC00-\uDFFF]/g}function Bl(){return/[\xAD\u0600-\u0605\u061C\u06DD\u070F\u08E2\u180E\u200B-\u200F\u202A-\u202E\u2060-\u2064\u2066-\u206F\uFEFF\uFFF9-\uFFFB]|\uD804[\uDCBD\uDCCD]|\uD80D[\uDC30-\uDC38]|\uD82F[\uDCA0-\uDCA3]|\uD834[\uDD73-\uDD7A]|\uDB40[\uDC01\uDC20-\uDC7F]/g}function jl(){return/[#*0-9]\uFE0F?\u20E3|[\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23ED-\u23EF\u23F1\u23F2\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB\u25FC\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692\u2694-\u2697\u2699\u269B\u269C\u26A0\u26A7\u26AA\u26B0\u26B1\u26BD\u26BE\u26C4\u26C8\u26CF\u26D1\u26E9\u26F0-\u26F5\u26F7\u26F8\u26FA\u2702\u2708\u2709\u270F\u2712\u2714\u2716\u271D\u2721\u2733\u2734\u2744\u2747\u2757\u2763\u27A1\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B55\u3030\u303D\u3297\u3299]\uFE0F?|[\u261D\u270C\u270D](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?|[\u270A\u270B](?:\uD83C[\uDFFB-\uDFFF])?|[\u23E9-\u23EC\u23F0\u23F3\u25FD\u2693\u26A1\u26AB\u26C5\u26CE\u26D4\u26EA\u26FD\u2705\u2728\u274C\u274E\u2753-\u2755\u2795-\u2797\u27B0\u27BF\u2B50]|\u26D3\uFE0F?(?:\u200D\uD83D\uDCA5)?|\u26F9(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|\u2764\uFE0F?(?:\u200D(?:\uD83D\uDD25|\uD83E\uDE79))?|\uD83C(?:[\uDC04\uDD70\uDD71\uDD7E\uDD7F\uDE02\uDE37\uDF21\uDF24-\uDF2C\uDF36\uDF7D\uDF96\uDF97\uDF99-\uDF9B\uDF9E\uDF9F\uDFCD\uDFCE\uDFD4-\uDFDF\uDFF5\uDFF7]\uFE0F?|[\uDF85\uDFC2\uDFC7](?:\uD83C[\uDFFB-\uDFF
F])?|[\uDFC4\uDFCA](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDFCB\uDFCC](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDCCF\uDD8E\uDD91-\uDD9A\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF43\uDF45-\uDF4A\uDF4C-\uDF7C\uDF7E-\uDF84\uDF86-\uDF93\uDFA0-\uDFC1\uDFC5\uDFC6\uDFC8\uDFC9\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF8-\uDFFF]|\uDDE6\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF]|\uDDE7\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF]|\uDDE8\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF]|\uDDE9\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF]|\uDDEA\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA]|\uDDEB\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7]|\uDDEC\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE]|\uDDED\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA]|\uDDEE\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9]|\uDDEF\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5]|\uDDF0\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF]|\uDDF1\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE]|\uDDF2\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF]|\uDDF3\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF]|\uDDF4\uD83C\uDDF2|\uDDF5\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE]|\uDDF6\uD83C\uDDE6|\uDDF7\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC]|\uDDF8\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF]|\uDDF9\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF]|\uDDFA\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF]|\uDDFB\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA]|\uDDFC\uD83C[\uDDEB\uDDF8]|\uDDFD\uD83C\uDDF0|\uDDFE\uD83C[\uDDEA\uDDF9]|\uDDFF\uD83C[\uDDE6\uDDF2\uDDFC]|\uDF44(?:\u200D\uD83D\uDFEB)?|\uDF4B(?:\u200D\uD83D\uDFE9)?|\uDFC3(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1
\uFE0F?))?|\uDFF3\uFE0F?(?:\u200D(?:\u26A7\uFE0F?|\uD83C\uDF08))?|\uDFF4(?:\u200D\u2620\uFE0F?|\uDB40\uDC67\uDB40\uDC62\uDB40(?:\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDC73\uDB40\uDC63\uDB40\uDC74|\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F)?)|\uD83D(?:[\uDC3F\uDCFD\uDD49\uDD4A\uDD6F\uDD70\uDD73\uDD76-\uDD79\uDD87\uDD8A-\uDD8D\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA\uDECB\uDECD-\uDECF\uDEE0-\uDEE5\uDEE9\uDEF0\uDEF3]\uFE0F?|[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDC8F\uDC91\uDCAA\uDD7A\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC](?:\uD83C[\uDFFB-\uDFFF])?|[\uDC6E\uDC70\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4\uDEB5](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD74\uDD90](?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?|[\uDC00-\uDC07\uDC09-\uDC14\uDC16-\uDC25\uDC27-\uDC3A\uDC3C-\uDC3E\uDC40\uDC44\uDC45\uDC51-\uDC65\uDC6A\uDC79-\uDC7B\uDC7D-\uDC80\uDC84\uDC88-\uDC8E\uDC90\uDC92-\uDCA9\uDCAB-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDDA4\uDDFB-\uDE2D\uDE2F-\uDE34\uDE37-\uDE41\uDE43\uDE44\uDE48-\uDE4A\uDE80-\uDEA2\uDEA4-\uDEB3\uDEB7-\uDEBF\uDEC1-\uDEC5\uDED0-\uDED2\uDED5-\uDED7\uDEDC-\uDEDF\uDEEB\uDEEC\uDEF4-\uDEFC\uDFE0-\uDFEB\uDFF0]|\uDC08(?:\u200D\u2B1B)?|\uDC15(?:\u200D\uD83E\uDDBA)?|\uDC26(?:\u200D(?:\u2B1B|\uD83D\uDD25))?|\uDC3B(?:\u200D\u2744\uFE0F?)?|\uDC41\uFE0F?(?:\u200D\uD83D\uDDE8\uFE0F?)?|\uDC68(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDC68\uDC69]\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?)|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D
(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?\uDC68\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D\uDC68\uD83C[\uDFFB-\uDFFE])))?))?|\uDC69(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:\uDC8B\u200D\uD83D)?[\uDC68\uDC69]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D(?:[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uDC66(?:\u200D\uD83D\uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?|\uDC69\u200D\uD83D(?:\uDC66(?:\u200D\uD83D\
uDC66)?|\uDC67(?:\u200D\uD83D[\uDC66\uDC67])?))|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFC-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFD-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD83C[\uDFFB-\uDFFD\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D\uD83D(?:[\uDC68\uDC69]|\uDC8B\u200D\uD83D[\uDC68\uDC69])\uD83C[\uDFFB-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83D[\uDC68\uDC69]\uD8
3C[\uDFFB-\uDFFE])))?))?|\uDC6F(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDD75(?:\uFE0F|\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|\uDE2E(?:\u200D\uD83D\uDCA8)?|\uDE35(?:\u200D\uD83D\uDCAB)?|\uDE36(?:\u200D\uD83C\uDF2B\uFE0F?)?|\uDE42(?:\u200D[\u2194\u2195]\uFE0F?)?|\uDEB6(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?)|\uD83E(?:[\uDD0C\uDD0F\uDD18-\uDD1F\uDD30-\uDD34\uDD36\uDD77\uDDB5\uDDB6\uDDBB\uDDD2\uDDD3\uDDD5\uDEC3-\uDEC5\uDEF0\uDEF2-\uDEF8](?:\uD83C[\uDFFB-\uDFFF])?|[\uDD26\uDD35\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD\uDDCF\uDDD4\uDDD6-\uDDDD](?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDDDE\uDDDF](?:\u200D[\u2640\u2642]\uFE0F?)?|[\uDD0D\uDD0E\uDD10-\uDD17\uDD20-\uDD25\uDD27-\uDD2F\uDD3A\uDD3F-\uDD45\uDD47-\uDD76\uDD78-\uDDB4\uDDB7\uDDBA\uDDBC-\uDDCC\uDDD0\uDDE0-\uDDFF\uDE70-\uDE7C\uDE80-\uDE88\uDE90-\uDEBD\uDEBF-\uDEC2\uDECE-\uDEDB\uDEE0-\uDEE8]|\uDD3C(?:\u200D[\u2640\u2642]\uFE0F?|\uD83C[\uDFFB-\uDFFF])?|\uDDCE(?:\uD83C[\uDFFB-\uDFFF])?(?:\u200D(?:[\u2640\u2642]\uFE0F?(?:\u200D\u27A1\uFE0F?)?|\u27A1\uFE0F?))?|\uDDD1(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1|\uDDD1\u200D\uD83E\uDDD2(?:\u200D\uD83E\uDDD2)?|\uDDD2(?:\u200D\uD83E\uDDD2)?))|\uD83C(?:\uDFFB(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFC-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFC(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFD-\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\
uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFD(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFE(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFD\uDFFF]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?|\uDFFF(?:\u200D(?:[\u2695\u2696\u2708]\uFE0F?|\u2764\uFE0F?\u200D(?:\uD83D\uDC8B\u200D)?\uD83E\uDDD1\uD83C[\uDFFB-\uDFFE]|\uD83C[\uDF3E\uDF73\uDF7C\uDF84\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E(?:[\uDDAF\uDDBC\uDDBD](?:\u200D\u27A1\uFE0F?)?|[\uDDB0-\uDDB3]|\uDD1D\u200D\uD83E\uDDD1\uD83C[\uDFFB-\uDFFF])))?))?|\uDEF1(?:\uD83C(?:\uDFFB(?:\u200D\uD83E\uDEF2\uD83C[\uDFFC-\uDFFF])?|\uDFFC(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFD-\uDFFF])?|\uDFFD(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])?|\uDFFE(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFD\uDFFF])?|\uDFFF(?:\u200D\uD83E\uDEF2\uD83C[\uDFFB-\uDFFE])?))?)/g}var Ll=function(e,t){var n=t.emoji,r=t.nonBmp,t=t.punctuations,a=!1;return n&&(a=a||jl().test(e)),r&&(a=a||Ml().test(e)||Pl().test(e)||Bl().test(e)),a=t?a||Il().test(e):a};function ql(e){var t=1=n){if(i.numLigatures/i.occurrences==1)return!0;if(0===i.numLigatures)return!1}i.occurrences++;var n=30,l="".concat(n,"px ").concat(s),u=(a.font=l,r.charAt(0)),c=a.measureText(u).width;if(0===c)return i.numLigatures++,!0;c<30&&(c*=d=30/c,l="".concat(n*=d,"px 
").concat(s)),o.width=c,o.height=n,a.font=l,a.textAlign="left",a.textBaseline="top",a.fillText(u,0,0);var d=new Uint32Array(a.getImageData(0,0,c,n).data.buffer);if(!d.some(function(e){return e}))return i.numLigatures++,!0;a.clearRect(0,0,c,n),a.fillText(r,0,0);var p=new Uint32Array(a.getImageData(0,0,c,n).data.buffer),s=d.reduce(function(e,t,n){return 0===t&&0===p[n]||0!==t&&0!==p[n]?e:++e},0),o=r.split("").reduce(function(e,t){return e+a.measureText(t).width},0),l=a.measureText(r).width;return t<=s/d.length&&t<=1-l/o&&(i.numLigatures++,!0)}function zl(n){var e,t,r,a,o,i=function(e,t){t.startNode||(t=h({startNode:e},t));1===e.props.nodeType&&t.inLabelledByContext&&void 0===t.includeHidden&&(t=h({includeHidden:!C(e)},t));return t}(n,1a.length))},vu=function(e){if(e=(e=e||{}).modalPercent||.75,v.get("isModalOpen"))return v.get("isModalOpen");if(Mf(axe._tree[0],"dialog, [role=dialog], [aria-modal=true]",Ci).length)return v.set("isModalOpen",!0),!0;for(var t,n=xi(window),r=n.width*e,a=n.height*e,e=(n.width-r)/2,o=(n.height-a)/2,i=[{x:e,y:o},{x:n.width-e,y:o},{x:n.width/2,y:n.height/2},{x:e,y:n.height-o},{x:n.width-e,y:n.height-o}].map(function(e){return Array.from(document.elementsFromPoint(e.x,e.y))}),s=0;s=r&&parseInt(e.height,10)>=a&&"none"!==e.getPropertyValue("pointer-events")&&("absolute"===e.position||"fixed"===e.position)});if(t&&i.every(function(e){return e.includes(t)}))return v.set("isModalOpen",!0),{v:!0}}())return t.v;v.set("isModalOpen",void 0)};function yu(e){var t,n=1s.top+n)a=Math.max(a,s.bottom);else{if(0!==o)return!0;a=s.bottom,o++}}}catch(e){i.e(e)}finally{i.f()}return!1}var wu,Du,xu=function(e){return e instanceof window.Node},Eu="color.incompleteData",Fu={set:function(e,t){if("string"!=typeof e)throw new Error("Incomplete data: key must be a string");var n=v.get(Eu,function(){return{}});return t&&(n[e]=t),n[e]},get:function(e){var t=v.get(Eu);return null==t?void 0:t[e]},clear:function(){v.set(Eu,{})}},Cu=function(e,t){var 
n=e.nodeName.toUpperCase();return["IMG","CANVAS","OBJECT","IFRAME","VIDEO","SVG"].includes(n)?(Fu.set("bgColor","imgNode"),!0):((e="none"!==(n=(t=t||window.getComputedStyle(e)).getPropertyValue("background-image")))&&(t=/gradient/.test(n),Fu.set("bgColor",t?"bgGradient":"bgImage")),e)},Ns=(Se(a={},{Colorjs:function(){return ep},CssSelectorParser:function(){return ku.CssSelectorParser},doT:function(){return Tu.default},emojiRegexText:function(){return jl},memoize:function(){return Ru.default}}),_e(xn())),xn=_e(En()),En=(_e(Fn()),_e(Fr())),Fn=_e(t());function Au(e){return"function"==typeof e||"[object Function]"===wu.call(e)}function Nu(e){return e=function(e){e=Number(e);return isNaN(e)?0:0!==e&&isFinite(e)?(0>>0,a=arguments[1],o=0;o>>0,o=0;o>>0,r=2<=arguments.length?arguments[1]:void 0,a=0;athis.length)&&-1!==this.indexOf(e,t)}),Array.prototype.flat||Object.defineProperty(Array.prototype,"flat",{configurable:!0,value:function n(){var r=isNaN(arguments[0])?1:Number(arguments[0]);return r?Array.prototype.reduce.call(this,function(e,t){return Array.isArray(t)?e.push.apply(e,n.call(t,r-1)):e.push(t),e},[]):Array.prototype.slice.call(this)},writable:!0}),!window.Node||"isConnected"in window.Node.prototype||Object.defineProperty(window.Node.prototype,"isConnected",{get:function(){return!(this.ownerDocument&&this.ownerDocument.compareDocumentPosition(this)&this.DOCUMENT_POSITION_DISCONNECTED)}});var ku=_e(Dn()),Tu=_e(Vr()),Ru=_e(bn());function A(e,n){var t=e.length,r=(Array.isArray(e[0])||(e=[e]),(n=Array.isArray(n[0])?n:n.map(function(e){return[e]}))[0].length),a=n[0].map(function(e,t){return n.map(function(e){return e[t]})}),e=e.map(function(i){return a.map(function(e){var t=0;if(Array.isArray(i))for(var n=0;n)\[(-?[.\d]+),\s*(-?[.\d]+)\]?$/);return n?((t=new String(n[1])).range=[+n[2],+n[3]],t):e})})}function Hu(e,t,n){return Math.max(Math.min(n,t),e)}function $u(e,t){return Math.sign(e)===Math.sign(t)?e:-e}function Uu(e,t){return $u(Math.pow(Math.abs(e),t),e)}function 
Wu(e,t){return 0===t?0:e/t}function Gu(e,t){for(var n=2>1;e[a]","",""]);function tc(e,l,u,c){return Object.entries(e.coords).map(function(e,t){var n,r,e=x(e,2),a=e[0],e=e[1],o=l.coordGrammar[t],i=c[t],s=null==i?void 0:i.type,o=i.none?o.find(function(e){return ec.has(e)}):o.find(function(e){return e==s});if(o)return n=o.range,r=e.range||e.refRange,(n=""===s?n||[0,1]:n)&&r&&(c[t]=zu(n,r,c[t])),o;throw n=e.name||a,new TypeError("".concat(null!=s?s:i.raw," not allowed for ").concat(n," in ").concat(u,"()"))})}function nc(e){var t=(1"==t?(n=[0,100],r="%"):""==t&&(r="deg"),{fromRange:e,toRange:n,suffix:r}}),a.serializeCoords=function(e,a){return e.map(function(e,t){var t=o[t],n=t.fromRange,r=t.toRange,t=t.suffix;return e=_u(e=n&&r?zu(n,r,e):e,{precision:a,unit:t})})}),a}Me(R,"registry",{}),Me(R,"DEFAULT_FORMAT",{type:"functions",name:"color"});var oc=new R({id:"xyz-d65",name:"XYZ D65",coords:{x:{name:"X"},y:{name:"Y"},z:{name:"Z"}},white:"D65",formats:{color:{ids:["xyz-d65","xyz"]}},aliases:["xyz"]}),t=(pe(ic,R),ye(ic));function ic(t){var n;return be(this,ic),t.coords||(t.coords={r:{range:[0,1],name:"Red"},g:{range:[0,1],name:"Green"},b:{range:[0,1],name:"Blue"}}),t.base||(t.base=oc),t.toXYZ_M&&t.fromXYZ_M&&(null==t.toBase&&(t.toBase=function(e){e=A(t.toXYZ_M,e);return e=n.white!==n.base.white?Qu(n.white,n.base.white,e):e}),null==t.fromBase)&&(t.fromBase=function(e){return e=Qu(n.base.white,n.white,e),A(t.fromXYZ_M,e)}),null==t.referred&&(t.referred="display"),n=le(this,ic,[t])}function sc(e,t){return e=N(e),!t||e.space.equals(t)?e.coords.slice():(t=R.get(t)).from(e)}function lc(e,t){e=N(e);var t=R.resolveCoord(t,e.space),n=t.space,t=t.index;return sc(e,n)[t]}function uc(e,t,n){return e=N(e),t=R.get(t),e.coords=t.to(e.space,n),e}function cc(e,t,n){if(e=N(e),2===arguments.length&&"object"===Ou(t)){var r,a=t;for(r in a)cc(e,r,a[r])}else{"function"==typeof n&&(n=n(lc(e,t)));var o=R.resolveCoord(t,e.space),i=o.space,o=o.index,s=sc(e,i);s[o]=n,uc(e,i,s)}return 
e}cc.returns=uc.returns="color";var En=new R({id:"xyz-d50",name:"XYZ D50",white:"D50",base:oc,fromBase:function(e){return Qu(oc.white,"D50",e)},toBase:function(e){return Qu("D50",oc.white,e)}}),dc=24389/27,pc=Zu.D50,fc=new R({id:"lab",name:"Lab",coords:{l:{refRange:[0,100],name:"Lightness"},a:{refRange:[-125,125]},b:{refRange:[-125,125]}},white:pc,base:En,fromBase:function(e){e=e.map(function(e,t){return e/pc[t]}).map(function(e){return 216/24389 | "," | [-1,1]"," | [-1,1]"]}}});function mc(e){return(e%360+360)%360}var hc=new R({id:"lch",name:"LCH",coords:{l:{refRange:[0,100],name:"Lightness"},c:{refRange:[0,150],name:"Chroma"},h:{refRange:[0,360],type:"angle",name:"Hue"}},base:fc,fromBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2],r=Math.abs(n)<.02&&Math.abs(e)<.02?NaN:180*Math.atan2(e,n)/Math.PI;return[t,Math.sqrt(Math.pow(n,2)+Math.pow(e,2)),mc(r)]},toBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2];return n<0&&(n=0),isNaN(e)&&(e=0),[t,n*Math.cos(e*Math.PI/180),n*Math.sin(e*Math.PI/180)]},formats:{lch:{coords:[" | "," | "," | "]}}}),gc=Math.pow(25,7),bc=Math.PI,vc=180/bc,yc=bc/180;function wc(e){var t=e*e;return t*t*t*e}function Dc(e,t){var n=2 | "," | [-1,1]"," | [-1,1]"]}}});function Nc(e,t){var n=x(N([e,t]),2);e=n[0],t=n[1];var n=x(Ac.from(e),3),e=n[0],r=n[1],n=n[2],t=x(Ac.from(t),3),a=t[0],r=r-t[1],n=n-t[2];return Math.sqrt(Math.pow(e-a,2)+Math.pow(r,2)+Math.pow(n,2))}var kc=75e-6;function Tc(e,t){var n=(2 | "," | [-1,1]"," | [-1,1]"]}}}),Uc=new R({id:"jzczhz",name:"JzCzHz",coords:{jz:{refRange:[0,1],name:"Jz"},cz:{refRange:[0,1],name:"Chroma"},hz:{refRange:[0,360],type:"angle",name:"Hue"}},base:Ns,fromBase:function(e){var 
e=x(e,3),t=e[0],n=e[1],e=e[2],r=Math.abs(n)<2e-4&&Math.abs(e)<2e-4?NaN:180*Math.atan2(e,n)/Math.PI;return[t,Math.sqrt(Math.pow(n,2)+Math.pow(e,2)),mc(r)]},toBase:function(e){return[e[0],e[1]*Math.cos(e[2]*Math.PI/180),e[1]*Math.sin(e[2]*Math.PI/180)]}}),Wc=2610/16384,Gc=[[.3592832590121217,.6976051147779502,-.035891593232029],[-.1920808463704993,1.100476797037432,.0753748658519118],[.0070797844607479,.0748396662186362,.8433265453898765]],Yc=[[.5,.5,0],[6610/4096,-13613/4096,7003/4096],[17933/4096,-17390/4096,-543/4096]],Kc=[[.9999999999999998,.0086090370379328,.111029625003026],[.9999999999999998,-.0086090370379328,-.1110296250030259],[.9999999999999998,.5600313357106791,-.3206271749873188]],Xc=[[2.0701522183894223,-1.3263473389671563,.2066510476294053],[.3647385209748072,.6805660249472273,-.0453045459220347],[-.0497472075358123,-.0492609666966131,1.1880659249923042]],Zc=new R({id:"ictcp",name:"ICTCP",coords:{i:{refRange:[0,1],name:"I"},ct:{refRange:[-.5,.5],name:"CT"},cp:{refRange:[-.5,.5],name:"CP"}},base:Fn,fromBase:function(e){var e=A(Gc,e),t=e;return t=e.map(function(e){var t=.8359375+2413/128*Math.pow(e/1e4,Wc),e=1+18.6875*Math.pow(e/1e4,Wc);return Math.pow(t/e,2523/32)}),A(Yc,t)},toBase:function(e){e=A(Kc,e).map(function(e){var t=Math.max(Math.pow(e,32/2523)-.8359375,0),e=2413/128-18.6875*Math.pow(e,32/2523);return 1e4*Math.pow(t/e,16384/2610)});return A(Xc,e)}}),xn=Zu.D65,Jc=.42,Qc=1/Jc,e1=2*Math.PI,t1=[[.401288,.650173,-.051461],[-.250268,1.204414,.045854],[-.002079,.048952,.953127]],n1=[[1.8620678550872327,-1.0112546305316843,.14918677544445175],[.38752654323613717,.6214474419314753,-.008973985167612518],[-.015841498849333856,-.03412293802851557,1.0499644368778496]],r1=[[460,451,288],[460,-891,-261],[460,-220,-6300]],a1={dark:[.8,.525,.8],dim:[.9,.59,.9],average:[1,.69,1]},o1={h:[20.14,90,164.25,237.53,380.14],e:[.8,.7,1,1.2,.8],H:[0,100,200,300,400]},i1=180/Math.PI,s1=Math.PI/180;function l1(e,n){return e.map(function(e){var 
t=Uu(n*Math.abs(e)*.01,Jc);return 400*$u(t,e)/(t+27.13)})}function u1(e,t,n,r,a){var o={},e=(o.discounting=a,o.refWhite=e,o.surround=r,e.map(function(e){return 100*e})),i=(o.la=t,o.yb=n,e[1]),t=A(t1,e),n=(r=a1[o.surround])[0];o.c=r[1],o.nc=r[2];var e=Math.pow(1/(5*o.la+1),4),s=(o.fl=e*o.la+.1*(1-e)*(1-e)*Math.cbrt(5*o.la),o.flRoot=Math.pow(o.fl,.25),o.n=o.yb/i,o.z=1.48+Math.sqrt(o.n),o.nbb=.725*Math.pow(o.n,-.2),o.ncb=o.nbb,a?1:Math.max(Math.min(n*(1-1/3.6*Math.exp((-o.la-42)/92)),1),0));o.dRgb=t.map(function(e){return Lu(1,i/e,s)}),o.dRgbInv=o.dRgb.map(function(e){return 1/e});r=l1(t.map(function(e,t){return e*o.dRgb[t]}),o.fl);return o.aW=o.nbb*(2*r[0]+r[1]+.05*r[2]),o}var c1=u1(xn,64/Math.PI*.2,20,"average",!1);function d1(e,n){if(!(void 0!==e.J^void 0!==e.Q))throw new Error("Conversion requires one and only one: 'J' or 'Q'");if(!(void 0!==e.C^void 0!==e.M^void 0!==e.s))throw new Error("Conversion requires one and only one: 'C', 'M' or 's'");var t,r,a,o,i,s,l;if(void 0!==e.h^void 0!==e.H)return 0===e.J||0===e.Q?[0,0,0]:(s=void(s=0)!==e.h?mc(e.h)*s1:(a=((a=e.H)%400+400)%400,i=Math.floor(.01*a),a%=100,r=(t=x(o1.h.slice(i,i+2),2))[0],t=t[1],o=(i=x(o1.e.slice(i,i+2),2))[0],i=i[1],mc((a*(i*r-o*t)-100*r*i)/(a*(i-o)-100*i))*s1),t=Math.cos(s),r=Math.sin(s),void(a=0)!==e.J?a=.1*Uu(e.J,.5):void 0!==e.Q&&(a=.25*n.c*e.Q/((n.aW+4)*n.flRoot)),void(o=0)!==e.C?o=e.C/a:void 0!==e.M?o=e.M/n.flRoot/a:void 0!==e.s&&(o=4e-4*Math.pow(e.s,2)*(n.aW+4)/n.c),i=Uu(o*Math.pow(1.64-Math.pow(.29,n.n),-.73),10/9),e=.25*(Math.cos(s+2)+3.8),s=n.aW*Uu(a,2/n.c/n.z),e=5e4/13*n.nc*n.ncb*e,e=23*(.305+(s=s/n.nbb))*Wu(i,23*e+i*(11*t+108*r)),s=A(r1,[s,e*t,e*r]).map(function(e){return+e/1403}),e=n.fl,l=100/e*Math.pow(27.13,Qc),e=s.map(function(e){var t=Math.abs(e);return $u(l*Uu(t/(400-t),Qc),e)}),A(n1,e.map(function(e,t){return e*n.dRgbInv[t]})).map(function(e){return e/100}));throw new Error("Conversion requires one and only one: 'h' or 'H'")}function p1(e,n){var t,r,a,o,i,e=e.map(function(e){return 
100*e}),e=l1(A(t1,e).map(function(e,t){return e*n.dRgb[t]}),n.fl),s=e[0]+(-12*e[1]+e[2])/11,l=(e[0]+e[1]-2*e[2])/9,u=(Math.atan2(l,s)%e1+e1)%e1,c=.25*(Math.cos(2+u)+3.8),c=Uu(5e4/13*n.nc*n.ncb*Wu(c*Math.sqrt(Math.pow(s,2)+Math.pow(l,2)),e[0]+e[1]+1.05*e[2]+.305),.9)*Math.pow(1.64-Math.pow(.29,n.n),.73),s=Uu(n.nbb*(2*e[0]+e[1]+.05*e[2])/n.aW,.5*n.c*n.z),l=100*Uu(s,2),e=4/n.c*s*(n.aW+4)*n.flRoot,s=c*s,d=s*n.flRoot,u=mc(u*i1),p=((t=mc(t=u))<=o1.h[0]&&(t+=360),r=Gu(o1.h,t)-1,a=(o=x(o1.h.slice(r,2+r),2))[0],o=o[1],p=(i=x(o1.e.slice(r,2+r),2))[0],i=i[1],a=(t-a)/p,o1.H[r]+100*a/(a+(o-t)/i));return{J:l,C:s,h:u,s:50*Uu(n.c*c/(n.aW+4),.5),Q:e,M:d,H:p}}var Dn=new R({id:"cam16-jmh",cssId:"--cam16-jmh",name:"CAM16-JMh",coords:{j:{refRange:[0,100],name:"J"},m:{refRange:[0,105],name:"Colorfulness"},h:{refRange:[0,360],type:"angle",name:"Hue"}},base:oc,fromBase:function(e){e=p1(e,c1);return[e.J,e.M,e.h]},toBase:function(e){return d1({J:e[0],M:e[1],h:e[2]},c1)}}),bn=Zu.D65,f1=216/24389,m1=24389/27;function h1(e){return 8 | "," | "," | "]}}}),y1=Math.PI/180,w1=[1,.007,.0228];function D1(e){e[1]<0&&(e=v1.fromBase(v1.toBase(e)));var t=Math.log(Math.max(1+w1[2]*e[1]*b1.flRoot,1))/w1[2],n=e[0]*y1,r=t*Math.cos(n),t=t*Math.sin(n);return[e[2],r,t]}var x1={deltaE76:function(e,t){return Sc(e,t,"lab")},deltaECMC:function(e,t){var n=void 0===(n=(r=2l){if(3===Object.keys(n).length){var d=R.resolveCoord(n.channel),d=lc(k(e,d.space),d.id);if((d=Mu(d)?0:d)>=n.max)return k({space:"xyz-d65",coords:Zu.D65},e.space);if(d<=n.min)return k({space:"xyz-d65",coords:[0,0,0]},e.space)}var d=R.resolveCoord(a),n=d.space,p=d.id,f=k(e,n);f.coords.forEach(function(e,t){Mu(e)&&(f.coords[t]=0)});for(var n=(d.range||d.refRange)[0],m=(d=(d=l)?Math.floor(Math.log10(Math.abs(d))):0,Math.max(parseFloat("1e".concat(d-2)),1e-6)),h=n,g=lc(f,p);m | [0, 255]"),_1=Array(3).fill("[0, 255]"),O1=new t({id:"srgb",name:"sRGB",base:R1,fromBase:function(e){return e.map(function(e){var t=e<0?-1:1,n=e*t;return.0031308 | "," | 
[-1,1]"," | [-1,1]"]}}}),Y1=.5*Math.pow(5,.5)+.5,K1=Object.freeze({__proto__:null,contrastAPCA:function(e,t){t=N(t),e=N(e);var n=(t=x((t=k(t,"srgb")).coords,3))[0],r=t[1],t=t[2],a=.2126729*$1(n)+.7151522*$1(r)+.072175*$1(t),e=x((e=k(e,"srgb")).coords,3),e=(n=e[0],r=e[1],t=e[2],.2126729*$1(n)+.7151522*$1(r)+.072175*$1(t)),n=H1(a),r=H1(e),t=n"}),e.defineFunction("steps",nd,{returns:"array"})},steps:nd}),id=new R({id:"hsl",name:"HSL",coords:{h:{refRange:[0,360],type:"angle",name:"Hue"},s:{range:[0,100],name:"Saturation"},l:{range:[0,100],name:"Lightness"}},base:O1,fromBase:function(e){var t=Math.max.apply(Math,D(e)),n=Math.min.apply(Math,D(e)),e=x(e,3),r=e[0],a=e[1],o=e[2],i=NaN,e=0,s=(n+t)/2,l=t-n;if(0!=l){switch(e=0==s||1==s?0:(t-s)/Math.min(s,1-s),t){case r:i=(a-o)/l+(a | ","",""]},hsla:{coords:[" | ","",""],commas:!0,lastAlpha:!0}}}),sd=new R({id:"hsv",name:"HSV",coords:{h:{refRange:[0,360],type:"angle",name:"Hue"},s:{range:[0,100],name:"Saturation"},v:{range:[0,100],name:"Value"}},base:id,fromBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2],n=(e/=100)+(n/=100)*Math.min(e,1-e);return[t,0==n?0:200*(1-e/n),100*n]},toBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2],n=(e/=100)*(1-(n/=100)/2);return[t,0==n||1==n?0:(e-n)/Math.min(n,1-n)*100,100*n]},formats:{color:{id:"--hsv",coords:[" | "," | "," | "]}}}),ld=new R({id:"hwb",name:"HWB",coords:{h:{refRange:[0,360],type:"angle",name:"Hue"},w:{range:[0,100],name:"Whiteness"},b:{range:[0,100],name:"Blackness"}},base:sd,fromBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2];return[t,e*(100-n)/100,100-e]},toBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2],r=(n/=100)+(e/=100);return 1<=r?[t,0,100*(n/r)]:[t,100*(0==(r=1-e)?0:1-n/r),100*r]},formats:{hwb:{coords:[" | "," | "," | "]}}}),ud=new t({id:"a98rgb-linear",cssId:"--a98-rgb-linear",name:"Linear Adobe® 98 RGB 
compatible",white:"D65",toXYZ_M:[[.5766690429101305,.1855582379065463,.1882286462349947],[.29734497525053605,.6273635662554661,.07529145849399788],[.02703136138641234,.07068885253582723,.9913375368376388]],fromXYZ_M:[[2.0415879038107465,-.5650069742788596,-.34473135077832956],[-.9692436362808795,1.8759675015077202,.04155505740717557],[.013444280632031142,-.11836239223101838,1.0151749943912054]]}),cd=new t({id:"a98rgb",cssId:"a98-rgb",name:"Adobe® 98 RGB compatible",base:ud,toBase:function(e){return e.map(function(e){return Math.pow(Math.abs(e),563/256)*Math.sign(e)})},fromBase:function(e){return e.map(function(e){return Math.pow(Math.abs(e),256/563)*Math.sign(e)})}}),dd=new t({id:"prophoto-linear",cssId:"--prophoto-rgb-linear",name:"Linear ProPhoto",white:"D50",base:En,toXYZ_M:[[.7977666449006423,.13518129740053308,.0313477341283922],[.2880748288194013,.711835234241873,8993693872564e-17],[0,0,.8251046025104602]],fromXYZ_M:[[1.3457868816471583,-.25557208737979464,-.05110186497554526],[-.5446307051249019,1.5082477428451468,.02052744743642139],[0,0,1.2119675456389452]]}),pd=new t({id:"prophoto",cssId:"prophoto-rgb",name:"ProPhoto",base:dd,toBase:function(e){return e.map(function(e){return e<.03125?e/16:Math.pow(e,1.8)})},fromBase:function(e){return e.map(function(e){return 1/512<=e?Math.pow(e,1/1.8):16*e})}}),fd=new R({id:"oklch",name:"Oklch",coords:{l:{refRange:[0,1],name:"Lightness"},c:{refRange:[0,.4],name:"Chroma"},h:{refRange:[0,360],type:"angle",name:"Hue"}},white:"D65",base:Ac,fromBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2],r=Math.abs(n)<2e-4&&Math.abs(e)<2e-4?NaN:180*Math.atan2(e,n)/Math.PI;return[t,Math.sqrt(Math.pow(n,2)+Math.pow(e,2)),mc(r)]},toBase:function(e){var t,e=x(e,3),n=e[0],r=e[1],e=e[2],r=isNaN(e)?t=0:(t=r*Math.cos(e*Math.PI/180),r*Math.sin(e*Math.PI/180));return[n,t,r]},formats:{oklch:{coords:[" | "," | [0,1]"," | "]}}}),md=Zu.D65,hd=(bd=x(X1({space:oc,coords:md}),2))[0],gd=bd[1],bd=new 
R({id:"luv",name:"Luv",coords:{l:{refRange:[0,100],name:"Lightness"},u:{refRange:[-215,215]},v:{refRange:[-215,215]}},white:md,base:oc,fromBase:function(e){var e=[i(e[0]),i(e[1]),i(e[2])],t=e[1],e=x(X1({space:oc,coords:e}),2),n=e[0],e=e[1];return Number.isFinite(n)&&Number.isFinite(e)?[t=t<=216/24389?24389/27*t:116*Math.cbrt(t)-16,13*t*(n-hd),13*t*(e-gd)]:[0,0,0]},toBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2];return 0===t||Mu(t)?[0,0,0]:(n=i(n),e=i(e),n=n/(13*t)+hd,e=e/(13*t)+gd,[(t=t<=8?t/(24389/27):Math.pow((t+16)/116,3))*(9*n/(4*e)),t,t*((12-3*n-20*e)/(4*e))])},formats:{color:{id:"--luv",coords:[" | "," | [-1,1]"," | [-1,1]"]}}}),md=new R({id:"lchuv",name:"LChuv",coords:{l:{refRange:[0,100],name:"Lightness"},c:{refRange:[0,220],name:"Chroma"},h:{refRange:[0,360],type:"angle",name:"Hue"}},base:bd,fromBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2],r=Math.abs(n)<.02&&Math.abs(e)<.02?NaN:180*Math.atan2(e,n)/Math.PI;return[t,Math.sqrt(Math.pow(n,2)+Math.pow(e,2)),mc(r)]},toBase:function(e){var e=x(e,3),t=e[0],n=e[1],e=e[2];return n<0&&(n=0),isNaN(e)&&(e=0),[t,n*Math.cos(e*Math.PI/180),n*Math.sin(e*Math.PI/180)]},formats:{color:{id:"--lchuv",coords:[" | "," | "," | "]}}}),vd=o[0][0],yd=o[0][1],wd=o[0][2],Dd=o[1][0],xd=o[1][1],Ed=o[1][2],Fd=o[2][0],Cd=o[2][1],Ad=o[2][2];function Nd(e,t,n){t/=Math.sin(n)-e*Math.cos(n);return t<0?1/0:t}function kd(e){var t=Math.pow(e+16,3)/1560896,t=216/24389 | "," | "," | "]}}});function Sd(e,t){return Math.abs(t)/Math.sqrt(Math.pow(e,2)+1)}function Od(e){var t=Sd(e.r0s,e.r0i),n=Sd(e.r1s,e.r1i),r=Sd(e.g0s,e.g0i),a=Sd(e.g1s,e.g1i),o=Sd(e.b0s,e.b0i),e=Sd(e.b1s,e.b1i);return Math.min(t,n,r,a,o,e)}o[0][0],o[0][1],o[0][2],o[1][0],o[1][1],o[1][2],o[2][0],o[2][1],o[2][2];var o=new R({id:"hpluv",name:"HPLuv",coords:{h:{refRange:[0,360],type:"angle",name:"Hue"},s:{range:[0,100],name:"Saturation"},l:{range:[0,100],name:"Lightness"}},base:md,gamutSpace:"self",fromBase:function(e){var t,e=[i(e[0]),i(e[1]),i(e[2])],n=e[0],r=e[2];return 
99.9999999 | "," | "," | "]}}}),_d=2610/Math.pow(2,14),Md=Math.pow(2,14)/2610,Id=2523/Math.pow(2,5),Pd=Math.pow(2,5)/2523,Bd=3424/Math.pow(2,12),jd=2413/Math.pow(2,7),Ld=2392/Math.pow(2,7),qd=new t({id:"rec2100pq",cssId:"rec2100-pq",name:"REC.2100-PQ",base:Ee,toBase:function(e){return e.map(function(e){return 1e4*Math.pow(Math.max(Math.pow(e,Pd)-Bd,0)/(jd-Ld*Math.pow(e,Pd)),Md)/203})},fromBase:function(e){return e.map(function(e){var e=Math.max(203*e/1e4,0),t=Bd+jd*Math.pow(e,_d),e=1+Ld*Math.pow(e,_d);return Math.pow(t/e,Id)})}}),zd=.17883277,Vd=.28466892,Hd=.55991073,$d=3.7743,Ud=new t({id:"rec2100hlg",cssId:"rec2100-hlg",name:"REC.2100-HLG",referred:"scene",base:Ee,toBase:function(e){return e.map(function(e){return e<=.5?Math.pow(e,2)/3*$d:(Math.exp((e-Hd)/zd)+Vd)/12*$d})},fromBase:function(e){return e.map(function(e){return(e/=$d)<=1/12?Math.sqrt(3*e):zd*Math.log(12*e-Vd)+Hd})}}),Wd={};function Gd(e){var t=e.id;e.toCone_M,e.fromCone_M;Wd[t]=e}function Yd(e,t,n){var r=Wd[2"===o?(t=e,e=function(){var e=t.apply(void 0,arguments);return tp.get(e)},Object.assign(e,t)):"array"===o&&(e=e.map(function(e){return tp.get(e)})),e}var t=2r.value?(a.value=(a.value-r.value)*e/(n.value-r.value),n.value=e):a.value=n.value=0,r.value=0,t[n.name]=n.value,t[r.name]=r.value,t[a.name]=a.value,t}},{key:"clip",value:function(){var e=new up(this),t=e.getLuminosity(),n=Math.min(e.r,e.g,e.b),r=Math.max(e.r,e.g,e.b);return n<0&&(e.r=t+(e.r-t)*t/(t-n),e.g=t+(e.g-t)*t/(t-n),e.b=t+(e.b-t)*t/(t-n)),1Math.ceil(l.left+l.width)||Math.floor(e.top+e.height)>Math.ceil(l.top+l.height))}))}while(e=u);return!1}function xp(e){for(var t=g(e).parent;t;){if(ff(t.actualNode))return t.actualNode;t=t.parent}}var Ep=function r(a,o){var t=2a&&e.left>n.right||e.top>r&&e.top>n.bottom||e.rightn.right||e.top>n.bottom)||"scroll"===a.overflow||"auto"===a.overflow||t instanceof window.HTMLBodyElement||t instanceof window.HTMLHtmlElement)},Ap=0;function Np(e,t,n){var r;return 
be(this,Np),(r=le(this,Np)).shadowId=n,r.children=[],r.actualNode=e,(r.parent=t)||(Ap=0),r.nodeIndex=Ap++,r._isHidden=null,r._cache={},r._isXHTML=ma(e.ownerDocument),"input"===e.nodeName.toLowerCase()&&(n=e.getAttribute("type"),n=r._isXHTML?n:(n||"").toLowerCase(),Qf().includes(n)||(n="text"),r._type=n),v.get("nodeMap")&&v.get("nodeMap").set(e,ue(r)),r}pe(Np,p),ye(Np,[{key:"props",get:function(){var e,t,n,r;return this._cache.hasOwnProperty("props")||(e=(r=this.actualNode).nodeType,t=r.nodeName,n=r.id,r=r.nodeValue,this._cache.props={nodeType:e,nodeName:this._isXHTML?t:t.toLowerCase(),id:n,type:this._type,nodeValue:r},1===e&&(this._cache.props.multiple=this.actualNode.multiple,this._cache.props.value=this.actualNode.value,this._cache.props.selected=this.actualNode.selected,this._cache.props.checked=this.actualNode.checked,this._cache.props.indeterminate=this.actualNode.indeterminate)),this._cache.props}},{key:"attr",value:function(e){return"function"!=typeof this.actualNode.getAttribute?null:this.actualNode.getAttribute(e)}},{key:"hasAttr",value:function(e){return"function"==typeof this.actualNode.hasAttribute&&this.actualNode.hasAttribute(e)}},{key:"attrNames",get:function(){var e;return this._cache.hasOwnProperty("attrNames")||(e=(this.actualNode.attributes instanceof window.NamedNodeMap?this.actualNode:this.actualNode.cloneNode(!1)).attributes,this._cache.attrNames=Array.from(e).map(function(e){return e.name})),this._cache.attrNames}},{key:"getComputedStylePropertyValue",value:function(e){var t="computedStyle_"+e;return this._cache.hasOwnProperty(t)||(this._cache.hasOwnProperty("computedStyle")||(this._cache.computedStyle=window.getComputedStyle(this.actualNode)),this._cache[t]=this._cache.computedStyle.getPropertyValue(e)),this._cache[t]}},{key:"isFocusable",get:function(){return this._cache.hasOwnProperty("isFocusable")||(this._cache.isFocusable=y(this.actualNode)),this._cache.isFocusable}},{key:"tabbableElements",get:function(){return 
this._cache.hasOwnProperty("tabbableElements")||(this._cache.tabbableElements=hs(this)),this._cache.tabbableElements}},{key:"clientRects",get:function(){return this._cache.hasOwnProperty("clientRects")||(this._cache.clientRects=Array.from(this.actualNode.getClientRects()).filter(function(e){return 0e.clientWidth+r,r=e.scrollHeight>e.clientHeight+r;if(a||r)return t=pf(n=window.getComputedStyle(e),"overflow-x"),n=pf(n,"overflow-y"),a&&t||r&&n?{elm:e,top:e.scrollTop,left:e.scrollLeft}:void 0}),mf=function(){var e=0=n.startTime}),a=0;a"].includes(b[0].combinator))throw new Error("axe.utils.querySelectorAll does not support the combinator: "+g[1].combinator);(">"===b[0].combinator?d=d||[]:p=p||[]).push(b)}g[0].id&&c.shadowId!==s.parentShadowId||null==(b=s.anyLevel)||!b.includes(g)||(p=p||[]).push(g)}for(c.children&&c.children.length&&(i.push(s),s=_f(c.children,p,d,c.shadowId,o.pop()));s.vNodesIndex===s.vNodes.length&&i.length;)o.push(s),s=i.pop()}return l},If=function(e){var t,n,s,l,e=void 0===(e=e.treeRoot)?axe._tree[0]:e;return t=[],e=Mf(e=e,"*",function(e){return!t.includes(e.shadowId)&&(t.push(e.shadowId),!0)}).map(function(e){return{shadowId:e.shadowId,rootNode:Ko(e.actualNode)}}),(e=Of(e,[])).length?(n=document.implementation.createHTMLDocument("Dynamic document for loading cssom"),n=bf(n),s=n,l=[],e.forEach(function(e,t){var n=e.rootNode,e=e.shadowId,r=function(e,t,n){t=11===e.nodeType&&t?function(r,a){return Array.from(r.children).filter(Pf).reduce(function(e,t){var n=t.nodeName.toUpperCase(),t="STYLE"===n?t.textContent:t,t=a({data:t,isLink:"LINK"===n,root:r});return t.sheet&&e.push(t.sheet),e},[])}(e,n):function(e){return Array.from(e.styleSheets).filter(function(e){return!!e.media&&Bf(e.media.mediaText)})}(e);return function(e){var t=[];return e.filter(function(e){if(e.href){if(t.includes(e.href))return!1;t.push(e.href)}return!0})}(t)}(n,e,s);if(!r)return Promise.all(l);var 
a=t+1,o={rootNode:n,shadowId:e,convertDataToStylesheet:s,rootIndex:a},i=[],t=Promise.all(r.map(function(e,t){return kf(e,o,[a,t],i)}));l.push(t)}),Promise.all(l).then(function n(e){return e.reduce(function(e,t){return Array.isArray(t)?e.concat(n(t)):e.concat(t)},[])})):Promise.resolve()};function Pf(e){var t=e.nodeName.toUpperCase(),n=e.getAttribute("href"),r=e.getAttribute("rel"),n="LINK"===t&&n&&r&&e.rel.toUpperCase().includes("STYLESHEET");return"STYLE"===t||n&&Bf(e.media)}function Bf(e){return!e||!e.toUpperCase().includes("PRINT")}var jf=function(e){return e=void 0===(e=e.treeRoot)?axe._tree[0]:e,e=Mf(e,"video, audio",function(e){e=e.actualNode;return e.hasAttribute("src")?!!e.getAttribute("src"):!(Array.from(e.getElementsByTagName("source")).filter(function(e){return!!e.getAttribute("src")}).length<=0)}),Promise.all(e.map(function(e){var n,e=e.actualNode;return n=e,new Promise(function(t){0.95*xi(window).width||s<10||e.querySelector("object, embed, iframe, applet"))},Dm=function(e){return!(!Bs(e)&&!js(e))||!!e.getAttribute("id")&&(e=m(e.getAttribute("id")),!!document.querySelector('[headers~="'.concat(e,'"]')))},xm=function(e,t,n,r){if(Array.isArray(t)&&(r=n,n=t,t={x:0,y:0}),"string"==typeof e)switch(e){case"left":e={x:-1,y:0};break;case"up":e={x:0,y:-1};break;case"right":e={x:1,y:0};break;case"down":e={x:0,y:1}}return function e(t,n,r,a){var o,i=r[n.y]?r[n.y][n.x]:void 0;return i?"function"==typeof a&&!0===(o=a(i,n,r))?[i]:((o=e(t,{x:n.x+t.x,y:n.y+t.y},r,a)).unshift(i),o):[]}(e,{x:t.x+e.x,y:t.y+e.y},n,r)};function Em(e){var t=gm(e),r=this,a=[],t=(t.forEach(function(e){var t=e.getAttribute("headers"),t=(t&&(a=a.concat(t.split(/\s+/))),e.getAttribute("aria-labelledby"));t&&(a=a.concat(t.split(/\s+/)))}),t.filter(function(e){return""!==F(e.textContent)&&("TH"===e.nodeName.toUpperCase()||-1!==["rowheader","columnheader"].indexOf(e.getAttribute("role")))})),o=Ms(e),i=!0;return t.forEach(function(t){var 
e,n;t.getAttribute("id")&&a.includes(t.getAttribute("id"))||(e=Is(t,o),n=!1,(n=!(n=Bs(t)?xm("down",e,o).find(function(e){return!Bs(e)&&vm(e,o).includes(t)}):n)&&js(t)?xm("right",e,o).find(function(e){return!js(e)&&vm(e,o).includes(t)}):n)||r.relatedNodes(t),i=i&&n)}),!!i||void 0}var Fm={},Cm=(Se(Fm,{allowedAttr:function(){return Cm},arialabelText:function(){return Fs},arialabelledbyText:function(){return Es},getAccessibleRefs:function(){return Nm},getElementUnallowedRoles:function(){return Sm},getExplicitRole:function(){return c},getImplicitRole:function(){return rl},getOwnedVirtual:function(){return cl},getRole:function(){return d},getRoleType:function(){return mu},getRolesByType:function(){return _m},getRolesWithNameFromContents:function(){return Bm},implicitNodes:function(){return Lm},implicitRole:function(){return rl},isAccessibleRef:function(){return qm},isAriaRoleAllowedOnElement:function(){return km},isComboboxPopup:function(){return zm},isUnsupportedRole:function(){return Rs},isValidRole:function(){return Ss},label:function(){return Hm},labelVirtual:function(){return Gl},lookupTable:function(){return jm},namedFromContents:function(){return ul},requiredAttr:function(){return $m},requiredContext:function(){return Um},requiredOwned:function(){return Wm},validateAttr:function(){return Ym},validateAttrValue:function(){return Gm}}),function(e){var e=w.ariaRoles[e],t=D(_s());return e&&(e.allowedAttrs&&t.push.apply(t,D(e.allowedAttrs)),e.requiredAttrs)&&t.push.apply(t,D(e.requiredAttrs)),t}),Am=/^idrefs?$/,Nm=function(e){e=e.actualNode||e;var t=(t=Xo(e)).documentElement||t,n=v.get("idRefsByRoot",function(){return new Map}),r=n.get(t);return r||(r=new Map,n.set(t,r),function e(t,n,r){if(t.hasAttribute){var a;"LABEL"===t.nodeName.toUpperCase()&&t.hasAttribute("for")&&(a=t.getAttribute("for"),n.has(a)?n.get(a).push(t):n.set(a,[t]));for(var o=0;o option, datalist > option, optgroup > option")},SELECT:function(e){var 
t=e.node,e=e.role;return!t.multiple&&t.size<=1&&"menu"===e},SVG:function(e){var t=e.node,e=e.out;return!(!t.parentNode||"http://www.w3.org/2000/svg"!==t.parentNode.namespaceURI)||e}},cd.rolesOfType={widget:["button","checkbox","dialog","gridcell","link","log","marquee","menuitem","menuitemcheckbox","menuitemradio","option","progressbar","radio","scrollbar","searchbox","slider","spinbutton","status","switch","tab","tabpanel","textbox","timer","tooltip","tree","treeitem"]},cd),Lm=function(e){var t=null,e=jm.role[e];return t=e&&e.implicit?Ba(e.implicit):t},qm=function(e){return!!Nm(e).length};function zm(e){var t=(1=o}},Ym=function(e){return!!w.ariaAttrs[e]};function Km(e){var t=[],n=gm(e),r=Ms(e);return n.forEach(function(e){su(e)&&ym(e)&&!Hm(e)&&!vm(e,r).some(function(e){return null!==e&&!!su(e)})&&t.push(e)}),!t.length||(this.relatedNodes(t),!1)}function Xm(e,t){return e=e.getAttribute("scope").toLowerCase(),-1!==t.values.indexOf(e)}function Zm(e,t,n){var r;if(void 0!==n.children)return r=n.attr("summary"),!(!(n=!!(n=n.children.find(Jm))&&F(Nl(n)))||!r)&&F(r).toLowerCase()===F(n).toLowerCase()}function Jm(e){return"caption"===e.props.nodeName}function Qm(e){return!fu(document)||"TH"===e.nodeName.toUpperCase()}function eh(e){var t=Ms(e),r=t[0];return t.length<=1||r.length<=1||e.rows.length<=1||r.reduce(function(e,t,n){return e||t!==r[n+1]&&void 0!==r[n+1]},!1)}function th(e,t,n){if(n.children){n=n.children.find(function(e){return"title"===e.props.nodeName});if(!n)return this.data({messageKey:"noTitle"}),!1;try{if(""===Nl(n,{includeHidden:!0}).trim())return this.data({messageKey:"emptyTitle"}),!1}catch(e){return}return!0}}var nh={};function rh(e,t,n){var r=n.props.nodeName,a=(n.attr("type")||"").toLowerCase();return(n=n.attr("value"))&&this.data({messageKey:"has-label"}),!("input"!==r||!["submit","reset"].includes(a))&&null===n}function ah(e,t,n){var r=n.props.nodeName;return!!["img","input","area"].includes(r)&&n.hasAttr("alt")}function oh(){}function ih(){var 
e=document.title;return!!F(e)}function sh(t,e){return!(0<(e=e.cssProperties.filter(function(e){if("important"===t.style.getPropertyPriority(e))return e})).length&&(this.data(e),1))}function lh(e,t,n){try{return!!F(Es(n))}catch(e){}}function uh(e,t,n){return!!F(Fs(n))}function ch(t){var e,n=t.getAttribute("id").trim();return!n||(e=Xo(t),(e=Array.from(e.querySelectorAll('[id="'.concat(m(n),'"]'))).filter(function(e){return e!==t})).length&&this.relatedNodes(e),this.data(n),0===e.length)}function dh(e){var t=[];return e.filter(function(e){return-1===t.indexOf(e.data)&&(t.push(e.data),!0)})}function ph(e,t,n){return n=F(n.attr("title")).toLowerCase(),this.data(n),!0}function fh(e){var t={};return e.forEach(function(e){t[e.data]=void 0!==t[e.data]?++t[e.data]:0}),e.forEach(function(e){e.result=!!t[e.data]}),e}function mh(e){return!!(e=cs(e,"href"))&&(C(e)||void 0)}Se(nh,{getAriaRolesByType:function(){return Om},getAriaRolesSupportingNameFromContent:function(){return Mm},getElementSpec:function(){return nl},getElementsByContentType:function(){return Os},getGlobalAriaAttrs:function(){return _s},implicitHtmlRoles:function(){return zs}}),ud=Ci;var hh=["alert","log","status"];function gh(e,t){var n=e.actualNode,r=d(e),n=(n.getAttribute("aria-live")||"").toLowerCase().trim(),a=Om("landmark");return!!(["assertive","polite"].includes(n)||hh.includes(r)||a.includes(r)||t.regionMatcher&&tl(e,t.regionMatcher))}function bh(e){var o=e.filter(function(e){return e.data.isIframe});return e.forEach(function(e){if(!e.result&&1!==e.node.ancestry.length){var t,n=e.node.ancestry.slice(0,-1),r=T(o);try{for(r.s();!(t=r.n()).done;){var a=t.value;if(Ef(n,a.node.ancestry)){e.result=a.result;break}}}catch(e){r.e(e)}finally{r.f()}}}),o.forEach(function(e){e.result||(e.result=!0)}),e}function vh(e){e=window.getComputedStyle(function(e){for(var t=e,n=e.textContent.trim(),r=n;r===n&&void 0!==t;){var a=-1;if(0===(e=t).children.length)return 
e;for(;a++,""===(r=e.children[a].textContent.trim())&&a+1r.fontSize)&&(!t.weight||n.fontWeight-t.weight>r.fontWeight)&&(!t.italic||n.isItalic&&!r.isItalic)},!1)}function wh(e,t,n){var r=(i=Array.from(e.parentNode.children)).indexOf(e),a=(t=t||{}).margins||[],o=i.slice(r+1).find(function(e){return"P"===e.nodeName.toUpperCase()}),i=i.slice(0,r).reverse().find(function(e){return"P"===e.nodeName.toUpperCase()}),r=vh(e),s=o?vh(o):null,i=i?vh(i):null,l=t.passLength,t=t.failLength,e=e.textContent.trim().length;return(o=null==o?void 0:o.textContent.trim().length)*lwindow.innerWidth||e.top>window.innerHeight))return{x:Math.min(Math.ceil(e.left+e.width/2),window.innerWidth-1),y:Math.min(Math.ceil(e.top+e.height/2),window.innerHeight-1)}});function Ah(e){return e.getPropertyValue("font-family").split(/[,;]/g).map(function(e){return e.trim().toLowerCase()})}var Nh=function(e,t){var n,r=window.getComputedStyle(e);return"none"!==r.getPropertyValue("background-image")||!!["border-bottom","border-top","outline"].reduce(function(e,t){var n=new S;return n.parseString(r.getPropertyValue(t+"-color")),e||"none"!==r.getPropertyValue(t+"-style")&&0=n.top&&e.bottom<=a&&e.left>=n.left&&e.right<=r})}function Zh(e){return e||void 0}var Jh=function(e,t){return t&&e?(t.alpha<1&&(t=Oh(t,e)),e=e.getRelativeLuminance(),t=t.getRelativeLuminance(),(Math.max(t,e)+.05)/(Math.min(t,e)+.05)):null};function Qh(e,t,n){for(var a=3=t.top&&e.left>=t.left&&e.bottom<=t.bottom&&e.right<=t.right}function dg(e){return{width:Math.round(10*e.width)/10,height:Math.round(10*e.height)/10}}function pg(e,t){return e.actualNode.contains(t.actualNode)&&!bs(t)}function fg(e){return e.map(function(e){return e.actualNode})}function mg(e,t,n){var r,a=void 0===(a=(t=t||{}).scaleMinimum)?2:a,t=void 0!==(t=t.lowerBound)&&t;return!((n=n.attr("content")||"")&&(n=n.split(/[;,]/).reduce(function(e,t){var n,t=t.trim();return 
t&&(n=(t=x(t.split("="),2))[0],t=t[1],n)&&t&&(n=n.toLowerCase().trim(),t=t.toLowerCase().trim(),"maximum-scale"===n&&"yes"===t&&(t=1),"maximum-scale"===n&&parseFloat(t)<0||(e[n]=t)),e},{}),!(t&&n["maximum-scale"]&&parseFloat(n["maximum-scale"]) "),n[t]=e,!0):(t=e.node.ancestry.slice(0,e.node.ancestry.length-1).flat(1/0).join(" > "),n[t]&&(n[t].result=!0),!1)})}function yg(e,t,n){return!$f(n,"track").some(function(e){return"captions"===(e.attr("kind")||"").toLowerCase()})&&void 0}function wg(e,t,n){var r=n.children;if(!r||!r.length)return!1;for(var a,o=!1,i=!1,s=0;s=i||"bold"===r,r=Math.ceil(72*m)/96,s=i&&r ":"";if(3===i&&""!==a.trim())return t+"#text";if(1!==i||!C(e))return!1;a=c(e);return a?!r.includes(a)&&t+"[role=".concat(a,"]"):!n.includes(o)&&t+o}(s,i,t);l&&(a.includes(l)||a.push(l),1===(null==s||null==(i=s.actualNode)?void 0:i.nodeType))&&r.push(s.actualNode)}}return 0===a.length?!1:(this.data({values:a.join(", ")}),this.relatedNodes(r),!0)}},"invalidrole-evaluate":w0,"is-element-focusable-evaluate":y0,"is-initiator-matches":rb,"is-on-screen-evaluate":ud,"is-visible-matches":Ci,"is-visible-on-screen-matches":function(e,t){return Ci(t)},"label-content-name-mismatch-evaluate":Mg,"label-content-name-mismatch-matches":function(e,t){var n=d(e);return!!(n&&Om("widget").includes(n)&&Mm().includes(n)&&(F(Fs(t))||F(Es(e)))&&F(fl(t)))},"label-matches":function(e,t){return"input"!==t.props.nodeName||!1===t.hasAttr("type")||(t=t.attr("type").toLowerCase(),!1===["hidden","image","button","submit","reset"].includes(t))},"landmark-has-body-context-matches":function(e,t){return e.hasAttribute("role")||!Jo(t,"article, aside, main, nav, section")},"landmark-is-top-level-evaluate":Hg,"landmark-is-unique-after":Tg,"landmark-is-unique-evaluate":kg,"landmark-unique-matches":function(e,t){return n=t,a=Om("landmark"),!!(o=d(n))&&("header"!==(r=n.props.nodeName)&&"footer"!==r?"section"!==r&&"form"!==r?0<=a.indexOf(o)||"region"===o:!!zl(n):!Ga(n,nb))&&C(t);var 
n,r,a,o},"layout-table-matches":tb,"link-in-text-block-evaluate":d0,"link-in-text-block-matches":eb,"link-in-text-block-style-evaluate":function(e){if(s0(e))return!1;for(var t=u(e);t&&1===t.nodeType&&!s0(t);)t=u(t);return t?(this.relatedNodes([t]),!!Nh(e,t)||!!function(e){for(var t=0,n=["before","after"];tt.maxDelay},"meta-viewport-scale-evaluate":mg,"multiple-label-evaluate":Sg,"nested-interactive-matches":Q0,"no-autoplay-audio-evaluate":gg,"no-autoplay-audio-matches":J0,"no-empty-role-matches":Z0,"no-explicit-name-required-matches":W0,"no-focusable-content-evaluate":function(e,t,n){if(n.children)try{var r,a=function t(e){if(!e.children){if(1===e.props.nodeType)throw new Error("Cannot determine children");return[]}var n=[];e.children.forEach(function(e){"widget"===mu(e)&&y(e)?n.push(e):n.push.apply(n,D(t(e)))});return n}(n);return a.length?(0<(r=a.filter(Vg)).length?(this.data({messageKey:"notHidden"}),this.relatedNodes(r)):this.relatedNodes(a),!1):!0}catch(e){}},"no-implicit-explicit-label-evaluate":v0,"no-naming-method-matches":X0,"no-negative-tabindex-matches":K0,"no-role-matches":Y0,"non-empty-if-present-evaluate":rh,"not-html-matches":G0,"object-is-loaded-matches":function(t,n){return[W0,function(e){var t;return null==e||null==(t=e.ownerDocument)||!t.createRange||((t=e.ownerDocument.createRange()).setStart(e,0),t.setEnd(e,e.childNodes.length),0===t.getClientRects().length)}].every(function(e){return e(t,n)})},"only-dlitems-evaluate":function(e,t,n){var a=["definition","term","list"];return(n=n.children.reduce(function(e,t){var n=t.actualNode;return"DIV"===n.nodeName.toUpperCase()&&null===d(n)?e.concat(t.children):e.concat(t)},[]).reduce(function(e,t){var n,t=t.actualNode,r=t.nodeName.toUpperCase();return 
1===t.nodeType&&C(t)?(n=c(t),("DT"!==r&&"DD"!==r||n)&&!a.includes(n)&&e.badNodes.push(t)):3===t.nodeType&&""!==t.nodeValue.trim()&&(e.hasNonEmptyTextNode=!0),e},{badNodes:[],hasNonEmptyTextNode:!1})).badNodes.length&&this.relatedNodes(n.badNodes),!!n.badNodes.length||n.hasNonEmptyTextNode},"only-listitems-evaluate":Dg,"p-as-heading-evaluate":wh,"p-as-heading-matches":U0,"page-no-duplicate-after":Qg,"page-no-duplicate-evaluate":Jg,"presentation-role-conflict-matches":$0,"presentational-role-evaluate":function(e,t,n){var r=c(n);if(["presentation","none"].includes(r)&&["iframe","frame"].includes(n.props.nodeName)&&n.hasAttr("title"))this.data({messageKey:"iframe",nodeName:n.props.nodeName});else{var a,o=d(n);if(["presentation","none"].includes(o))return this.data({role:o}),!0;["presentation","none"].includes(r)&&(r=_s().some(function(e){return n.hasAttr(e)}),a=y(n),this.data({messageKey:r&&!a?"globalAria":!r&&a?"focusable":"both",role:o}))}return!1},"region-after":bh,"region-evaluate":function(e,t,n){return this.data({isIframe:["iframe","frame"].includes(n.props.nodeName)}),!v.get("regionlessNodes",function(){return function t(e,n){var r=e.actualNode;{if("button"===d(e)||gh(e,n)||["iframe","frame"].includes(e.props.nodeName)||mp(e.actualNode)&&cs(e.actualNode,"href")||!C(r)){for(var a=e;a;)a._hasRegionDescendant=!0,a=a.parent;return["iframe","frame"].includes(e.props.nodeName)?[e]:[]}return r!==document.body&&su(r,!0)?[e]:e.children.filter(function(e){e=e.actualNode;return 1===e.nodeType}).map(function(e){return t(e,n)}).reduce(function(e,t){return e.concat(t)},[])}}(axe._tree[0],t).map(function(e){for(;e.parent&&!e.parent._hasRegionDescendant&&e.parent.actualNode!==document.body;)e=e.parent;return e}).filter(function(e,t,n){return n.indexOf(e)===t})}).includes(n)},"same-caption-summary-evaluate":Zm,"scope-value-evaluate":Xm,"scrollable-region-focusable-matches":function(e,t){return void 0!==ff(e,13)&&!1===zm(t)&&$f(t,"*").some(function(e){return 
iu(e,!0,!0)})},"skip-link-evaluate":mh,"skip-link-matches":H0,"structured-dlitems-evaluate":wg,"svg-namespace-matches":L0,"svg-non-empty-title-evaluate":th,"tabindex-evaluate":zg,"table-or-grid-role-matches":function(e,t){return t=d(t),["treegrid","grid","table"].includes(t)},"target-offset-evaluate":function(e,t,n){var r=(null==t?void 0:t.minOffset)||24;if(_i(10*r,n.boundingClientRect))return this.data({messageKey:"large",minOffset:r}),!0;var a,o,i=[],s=r,l=T(Qi(n,r));try{for(l.s();!(a=l.n()).done;){var u,c=a.value;"widget"===mu(c)&&y(c)&&(r<=.05+(u=2*(o=Mi(n,c,r/2),Math.round(10*o)/10))||(s=Math.min(s,u),i.push(c)))}}catch(e){l.e(e)}finally{l.f()}return 0===i.length?(this.data({closestOffset:s,minOffset:r}),!0):(this.relatedNodes(i.map(function(e){return e.actualNode})),i.some(bs)?(this.data({closestOffset:s,minOffset:r}),!bs(n)&&void 0):void this.data({messageKey:"nonTabbableNeighbor",closestOffset:s,minOffset:r}))},"target-size-evaluate":function(e,t,n){var r,a,o,i,s,l,u,t=(null==t?void 0:t.minSize)||24,c=n.boundingClientRect;return _i(10*t,c)?(this.data({messageKey:"large",minSize:t}),!0):(u=_i.bind(null,t),i=Qi(n),a=n,r=i.filter(function(e){return!cg(e,a)&&pg(a,e)}),o=(i=function(e,t){var n,r=[],a=[],o=T(t);try{for(o.s();!(n=o.n()).done;){var i=n.value;!pg(e,i)&&Pi(e,i)&&"none"!==i.getComputedStylePropertyValue("pointer-events")&&(cg(e,i)?r:a).push(i)}}catch(e){o.e(e)}finally{o.f()}return{fullyObscuringElms:r,partialObscuringElms:a}}(n,i)).fullyObscuringElms,i=i.partialObscuringElms,!r.length||!o.length&&u(c)?o.length?(this.relatedNodes(fg(o)),this.data({messageKey:"obscured"}),!0):(o=!bs(n)&&void 0,u(c)?(i=i.filter(function(e){return"widget"===mu(e)&&y(e)})).length?(s=i,n=(n=n).boundingClientRect,s=i.map(function(e){return e.boundingClientRect}),(s=0!==(n=Bi(n,s)).length?(l=void 0,n.reduce(function(e,t){var n=_i(l,e);return n!==_i(l,t)?n?e:t:(n=e.width*e.height,t.width*t.height elements of image maps have alternate text",help:"Active  elements must have 
alternate text"},"aria-allowed-attr":{description:"Ensures an element's role supports its ARIA attributes",help:"Elements must only use supported ARIA attributes"},"aria-allowed-role":{description:"Ensures role attribute has an appropriate value for the element",help:"ARIA role should be appropriate for the element"},"aria-braille-equivalent":{description:"Ensure aria-braillelabel and aria-brailleroledescription have a non-braille equivalent",help:"aria-braille attributes must have a non-braille equivalent"},"aria-command-name":{description:"Ensures every ARIA button, link and menuitem has an accessible name",help:"ARIA commands must have an accessible name"},"aria-conditional-attr":{description:"Ensures ARIA attributes are used as described in the specification of the element's role",help:"ARIA attributes must be used as specified for the element's role"},"aria-deprecated-role":{description:"Ensures elements do not use deprecated roles",help:"Deprecated ARIA roles must not be used"},"aria-dialog-name":{description:"Ensures every ARIA dialog and alertdialog node has an accessible name",help:"ARIA dialog and alertdialog nodes should have an accessible name"},"aria-hidden-body":{description:'Ensures aria-hidden="true" is not present on the document body.',help:'aria-hidden="true" must not be present on the document body'},"aria-hidden-focus":{description:"Ensures aria-hidden elements are not focusable nor contain focusable elements",help:"ARIA hidden element must not be focusable or contain focusable elements"},"aria-input-field-name":{description:"Ensures every ARIA input field has an accessible name",help:"ARIA input fields must have an accessible name"},"aria-meter-name":{description:"Ensures every ARIA meter node has an accessible name",help:"ARIA meter nodes must have an accessible name"},"aria-progressbar-name":{description:"Ensures every ARIA progressbar node has an accessible name",help:"ARIA progressbar nodes must have an accessible 
name"},"aria-prohibited-attr":{description:"Ensures ARIA attributes are not prohibited for an element's role",help:"Elements must only use permitted ARIA attributes"},"aria-required-attr":{description:"Ensures elements with ARIA roles have all required ARIA attributes",help:"Required ARIA attributes must be provided"},"aria-required-children":{description:"Ensures elements with an ARIA role that require child roles contain them",help:"Certain ARIA roles must contain particular children"},"aria-required-parent":{description:"Ensures elements with an ARIA role that require parent roles are contained by them",help:"Certain ARIA roles must be contained by particular parents"},"aria-roledescription":{description:"Ensure aria-roledescription is only used on elements with an implicit or explicit role",help:"aria-roledescription must be on elements with a semantic role"},"aria-roles":{description:"Ensures all elements with a role attribute use a valid value",help:"ARIA roles used must conform to valid values"},"aria-text":{description:'Ensures role="text" is used on elements with no focusable descendants',help:'"role=text" should have no focusable descendants'},"aria-toggle-field-name":{description:"Ensures every ARIA toggle field has an accessible name",help:"ARIA toggle fields must have an accessible name"},"aria-tooltip-name":{description:"Ensures every ARIA tooltip node has an accessible name",help:"ARIA tooltip nodes must have an accessible name"},"aria-treeitem-name":{description:"Ensures every ARIA treeitem node has an accessible name",help:"ARIA treeitem nodes should have an accessible name"},"aria-valid-attr-value":{description:"Ensures all ARIA attributes have valid values",help:"ARIA attributes must conform to valid values"},"aria-valid-attr":{description:"Ensures attributes that begin with aria- are valid ARIA attributes",help:"ARIA attributes must conform to valid names"},"audio-caption":{description:"Ensures