From 69e93308b0146fab1513037e4b28c5bdb209243d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Sch=C3=BCnadel?= Date: Wed, 12 Jan 2022 14:43:15 +0100 Subject: [PATCH 1/3] Include *.kt files in buildSrc in cache key --- README.md | 2 +- __tests__/cache.test.ts | 26 +++++++++++++++++++++++++- dist/cleanup/index.js | 2 +- dist/setup/index.js | 2 +- src/cache.ts | 2 +- 5 files changed, 29 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index c02eb6c..0dbf3fa 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ Currently, the following distributions are supported: ### Caching packages dependencies The action has a built-in functionality for caching and restoring dependencies. It uses [actions/cache](https://github.com/actions/cache) under hood for caching dependencies but requires less configuration settings. Supported package managers are gradle and maven. The format of the used cache key is `setup-java-${{ platform }}-${{ packageManager }}-${{ fileHash }}`, where the hash is based on the following files: -- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties` +- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/*.kt` - maven: `**/pom.xml` The cache input is optional, and caching is turned off by default. diff --git a/__tests__/cache.test.ts b/__tests__/cache.test.ts index b748e80..ab058de 100644 --- a/__tests__/cache.test.ts +++ b/__tests__/cache.test.ts @@ -98,7 +98,7 @@ describe('dependency cache', () => { await expect(restore('gradle')).rejects.toThrowError( `No file in ${projectRoot( workspace - )} matched to [**/*.gradle*,**/gradle-wrapper.properties], make sure you have checked out the target repository` + )} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/*.kt], make sure you have checked out the target repository` ); }); it('downloads cache based on build.gradle', async () => { @@ -118,6 +118,15 @@ describe('dependency cache', () => { expect(spyInfo).toBeCalledWith('gradle cache is not found'); }); }); + it('downloads cache based on buildSrc/Versions.kt', async () => { + createDirectory(join(workspace, 'buildSrc')); + createFile(join(workspace, 'buildSrc', 'Versions.kt')); + + await restore('gradle'); + expect(spyCacheRestore).toBeCalled(); + expect(spyWarning).not.toBeCalled(); + expect(spyInfo).toBeCalledWith('gradle cache is not found'); + }); }); describe('save', () => { let spyCacheSave: jest.SpyInstance< @@ -188,6 +197,16 @@ describe('dependency cache', () => { createFile(join(workspace, 'build.gradle.kts')); createStateForSuccessfulRestore(); + await save('gradle'); + expect(spyCacheSave).toBeCalled(); + expect(spyWarning).not.toBeCalled(); + expect(spyInfo).toBeCalledWith(expect.stringMatching(/^Cache saved with the key:.*/)); + }); + it('uploads cache based on buildSrc/Versions.kt', async () => { + createDirectory(join(workspace, 'buildSrc')); + createFile(join(workspace, 'buildSrc', 'Versions.kt')); + createStateForSuccessfulRestore(); + await save('gradle'); expect(spyCacheSave).toBeCalled(); expect(spyWarning).not.toBeCalled(); @@ -236,6 +255,11 @@ function createFile(path: string) { fs.writeFileSync(path, ''); } +function createDirectory(path: string) { + core.info(`created a directory at ${path}`); + fs.mkdirSync(path); +} + function projectRoot(workspace: string): string { if (os.platform() === 'darwin') { return `/private${workspace}`; diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js index 0a20472..c51cb6f 100644 --- a/dist/cleanup/index.js +++ b/dist/cleanup/index.js @@ -61833,7 +61833,7 @@ const 
supportedPackageManager = [ id: 'gradle', path: [path_1.join(os_1.default.homedir(), '.gradle', 'caches'), path_1.join(os_1.default.homedir(), '.gradle', 'wrapper')], // https://github.com/actions/cache/blob/0638051e9af2c23d10bb70fa9beffcad6cff9ce3/examples.md#java---gradle - pattern: ['**/*.gradle*', '**/gradle-wrapper.properties'] + pattern: ['**/*.gradle*', '**/gradle-wrapper.properties', 'buildSrc/**/*.kt'] } ]; function findPackageManager(id) { diff --git a/dist/setup/index.js b/dist/setup/index.js index 34b866d..f8f1bd7 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -18624,7 +18624,7 @@ const supportedPackageManager = [ id: 'gradle', path: [path_1.join(os_1.default.homedir(), '.gradle', 'caches'), path_1.join(os_1.default.homedir(), '.gradle', 'wrapper')], // https://github.com/actions/cache/blob/0638051e9af2c23d10bb70fa9beffcad6cff9ce3/examples.md#java---gradle - pattern: ['**/*.gradle*', '**/gradle-wrapper.properties'] + pattern: ['**/*.gradle*', '**/gradle-wrapper.properties', 'buildSrc/**/*.kt'] } ]; function findPackageManager(id) { diff --git a/src/cache.ts b/src/cache.ts index fb97fb0..c9a728b 100644 --- a/src/cache.ts +++ b/src/cache.ts @@ -31,7 +31,7 @@ const supportedPackageManager: PackageManager[] = [ id: 'gradle', path: [join(os.homedir(), '.gradle', 'caches'), join(os.homedir(), '.gradle', 'wrapper')], // https://github.com/actions/cache/blob/0638051e9af2c23d10bb70fa9beffcad6cff9ce3/examples.md#java---gradle - pattern: ['**/*.gradle*', '**/gradle-wrapper.properties'] + pattern: ['**/*.gradle*', '**/gradle-wrapper.properties', 'buildSrc/**/*.kt'] } ]; From f56009127dac2854d9c81a864779e12536d0e1ff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Sch=C3=BCnadel?= Date: Mon, 25 Apr 2022 13:13:12 +0200 Subject: [PATCH 2/3] fix dist/* --- dist/cleanup/index.js | 5618 ++++++++++++++++++++++++++--------------- dist/setup/index.js | 5006 +++++++++++++++++++++++------------- 2 files changed, 6726 insertions(+), 3898 deletions(-) diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js index 56f3bb2..ca0dd27 100644 --- a/dist/cleanup/index.js +++ b/dist/cleanup/index.js @@ -1130,6 +1130,11 @@ function assertDefined(name, value) { return value; } exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; //# sourceMappingURL=cacheUtils.js.map /***/ }), @@ -1833,17 +1838,7 @@ exports.parseURL = __webpack_require__(936).parseURL; /***/ }), -/* 71 */ -/***/ (function() { - -"use strict"; - -if (typeof Symbol === undefined || !Symbol.asyncIterator) { - Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); -} -//# sourceMappingURL=index.js.map - -/***/ }), +/* 71 */, /* 72 */, /* 73 */, /* 74 */, @@ -3197,7 +3192,19 @@ Object.defineProperty(exports, "__esModule", { value: true }); /***/ }), /* 96 */, -/* 97 */, +/* 97 */ +/***/ (function() { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+if (typeof Symbol === undefined || !Symbol.asyncIterator) { + Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); +} +//# sourceMappingURL=index.js.map + +/***/ }), /* 98 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -3808,18 +3815,18 @@ function downloadCache(archiveLocation, archivePath, options) { exports.downloadCache = downloadCache; // Reserve Cache function reserveCache(key, paths, options) { - var _a, _b; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const reserveCacheRequest = { key, - version + version, + cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); - return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? _b : -1; + return response; }); } exports.reserveCache = reserveCache; @@ -3956,14 +3963,15 @@ var DiagAPI = /** @class */ (function () { function DiagAPI() { function _logProxy(funcName) { return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } var logger = global_utils_1.getGlobal('diag'); // shortcut if logger not set if (!logger) return; - return logger[funcName].apply(logger, - // work around Function.prototype.apply types - // eslint-disable-next-line @typescript-eslint/no-explicit-any - arguments); + return logger[funcName].apply(logger, args); }; } // Using self local variable for minification purposes as 'this' cannot be minified @@ -4641,7 +4649,7 @@ exports.ROOT_CONTEXT = new BaseContext(); Object.defineProperty(exports, "__esModule", { value: true }); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.0.3'; +exports.VERSION = '1.0.4'; //# sourceMappingURL=version.js.map /***/ }), @@ -6072,7 +6080,7 @@ module.exports = require("https"); /* 213 */ /***/ (function(module) { -module.exports = require("punycode"); +module.exports = require("timers"); /***/ }), /* 214 */, @@ -6543,17 +6551,22 @@ var DiagConsoleLogger = /** @class */ (function () { function DiagConsoleLogger() { function _consoleFunc(funcName) { return function () { - var orgArguments = arguments; + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } if (console) { // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console var theFunc = console[funcName]; if (typeof theFunc !== 'function') { // Not all environments support all functions + // eslint-disable-next-line no-console theFunc = console.log; } // One last final check if (typeof theFunc === 'function') { - return theFunc.apply(console, orgArguments); + return theFunc.apply(console, args); } } }; @@ -9034,7 +9047,7 @@ module.exports = satisfies XMLDocumentCB = __webpack_require__(768); - XMLStringWriter = __webpack_require__(347); + XMLStringWriter = __webpack_require__(750); XMLStreamWriter = __webpack_require__(458); @@ -9282,26 +9295,53 @@ var MatchKind; Object.defineProperty(exports, '__esModule', { value: true }); -function _interopDefault (ex) { return (ex && (typeof ex === 
'object') && 'default' in ex) ? ex['default'] : ex; } - var uuid = __webpack_require__(585); -var tough = __webpack_require__(393); +var util = __webpack_require__(669); +var tslib = __webpack_require__(865); +var xml2js = __webpack_require__(992); +var abortController = __webpack_require__(106); +var logger$1 = __webpack_require__(492); +var coreAuth = __webpack_require__(229); +var os = __webpack_require__(87); var http = __webpack_require__(605); var https = __webpack_require__(211); -var node_fetch = _interopDefault(__webpack_require__(454)); -var abortController = __webpack_require__(106); -var FormData = _interopDefault(__webpack_require__(790)); -var util = __webpack_require__(669); -var url = __webpack_require__(835); -var stream = __webpack_require__(794); -var logger$1 = __webpack_require__(492); +var tough = __webpack_require__(393); var tunnel = __webpack_require__(413); -var tslib = __webpack_require__(865); -var coreAuth = __webpack_require__(229); -var xml2js = __webpack_require__(992); -var os = __webpack_require__(87); +var stream = __webpack_require__(794); +var FormData = __webpack_require__(790); +var node_fetch = __webpack_require__(454); var coreTracing = __webpack_require__(263); -__webpack_require__(71); +var url = __webpack_require__(835); +__webpack_require__(97); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tough__namespace = /*#__PURE__*/_interopNamespace(tough); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -9350,7 +9390,7 @@ class HttpHeaders { set(headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, - value: headerValue.toString() + value: headerValue.toString(), }; } /** @@ -9382,12 +9422,7 @@ class HttpHeaders { * Get the headers that are contained this collection as an object. */ rawHeaders() { - const result = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name.toLowerCase()] = header.value; - } - return result; + return this.toJson({ preserveCase: true }); } /** * Get the headers that are contained in this collection as an array. @@ -9424,14 +9459,27 @@ class HttpHeaders { /** * Get the JSON object representation of this HTTP header collection. 
*/ - toJson() { - return this.rawHeaders(); + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; } /** * Get the string representation of this HTTP header collection. */ toString() { - return JSON.stringify(this.toJson()); + return JSON.stringify(this.toJson({ preserveCase: true })); } /** * Create a deep clone/copy of this HttpHeaders collection. @@ -9475,11 +9523,14 @@ function decodeString(value) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ const Constants = { /** * The core-http version */ - coreHttpVersion: "2.2.2", + coreHttpVersion: "2.2.4", /** * Specifies HTTP. */ @@ -9515,12 +9566,12 @@ const Constants = { POST: "POST", MERGE: "MERGE", HEAD: "HEAD", - PATCH: "PATCH" + PATCH: "PATCH", }, StatusCodes: { TooManyRequests: 429, - ServiceUnavailable: 503 - } + ServiceUnavailable: 503, + }, }, /** * Defines constants for use with HTTP headers. @@ -9540,8 +9591,8 @@ const Constants = { /** * The UserAgent header. */ - USER_AGENT: "User-Agent" - } + USER_AGENT: "User-Agent", + }, }; // Copyright (c) Microsoft Corporation. @@ -9756,18 +9807,38 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ class Serializer { - constructor(modelMappers = {}, isXML) { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { this.modelMappers = modelMappers; this.isXML = isXML; } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. 
+ */ validateConstraints(mapper, value, objectName) { const failValidation = (constraintName, constraintValue) => { throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); }; if (mapper.constraints && value != undefined) { const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } @@ -9809,20 +9880,20 @@ class Serializer { } } /** - * Serialize the given object based on its metadata defined in the mapper + * Serialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param object - A valid Javascript object to be serialized - * @param objectName - Name of the serialized object - * @param options - additional options to deserialization - * @returns A valid serialized Javascript object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. */ serialize(mapper, object, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; let payload = {}; const mapperType = mapper.type.name; @@ -9892,20 +9963,20 @@ class Serializer { return payload; } /** - * Deserialize the given object based on its metadata defined in the mapper + * Deserialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param responseBody - A valid Javascript entity to be deserialized - * @param objectName - Name of the deserialized object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object + * @returns A valid deserialized Javascript object. */ deserialize(mapper, responseBody, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, }; if (responseBody == undefined) { if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { @@ -10004,9 +10075,7 @@ function bufferToBase64Url(buffer) { // Uint8Array to Base64. const str = encodeByteArray(buffer); // Base64 to Base64Url. - return trimEnd(str, "=") - .replace(/\+/g, "-") - .replace(/\//g, "_"); + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } function base64UrlToByteArray(str) { if (!str) { @@ -10222,10 +10291,10 @@ function serializeDictionaryType(serializer, mapper, object, objectName, isXml, return tempDictionary; } /** - * Resolves the additionalProperties property from a referenced mapper - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. */ function resolveAdditionalProperties(serializer, mapper, objectName) { const additionalProperties = mapper.type.additionalProperties; @@ -10236,7 +10305,7 @@ function resolveAdditionalProperties(serializer, mapper, objectName) { return additionalProperties; } /** - * Finds the mapper referenced by className + * Finds the mapper referenced by `className`. * @param serializer - The serializer containing the entire set of mappers * @param mapper - The composite mapper to resolve * @param objectName - Name of the object being serialized @@ -10575,7 +10644,9 @@ function getPolymorphicDiscriminatorSafely(serializer, typeName) { serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator); } -// TODO: why is this here? +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ function serializeObject(toSerialize) { const castToSerialize = toSerialize; if (toSerialize == undefined) @@ -10613,6 +10684,9 @@ function strEnum(o) { } return result; } +/** + * String enum containing the string types of property mappers. + */ // eslint-disable-next-line @typescript-eslint/no-redeclare const MapperType = strEnum([ "Base64Url", @@ -10630,7 +10704,7 @@ const MapperType = strEnum([ "String", "Stream", "TimeSpan", - "UnixTime" + "UnixTime", ]); // Copyright (c) Microsoft Corporation. @@ -10893,9 +10967,6 @@ class WebResource { } } -// Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - // Copyright (c) Microsoft Corporation. /** * A class that handles the query portion of a URLBuilder. @@ -11193,6 +11264,10 @@ class URLBuilder { } } } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ toString() { let result = ""; if (this._scheme) { @@ -11228,6 +11303,9 @@ class URLBuilder { this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); } } + /** + * Parses a given string URL into a new {@link URLBuilder}. + */ static parse(text) { const result = new URLBuilder(); result.set(text, "SCHEME_OR_HOST"); @@ -11484,6 +11562,60 @@ function nextQuery(tokenizer) { tokenizer._currentState = "DONE"; } +// Copyright (c) Microsoft Corporation. 
+function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + // Copyright (c) Microsoft Corporation. const RedactedString = "REDACTED"; const defaultAllowedHeaderNames = [ @@ -11524,7 +11656,7 @@ const defaultAllowedHeaderNames = [ "Retry-After", "Server", "Transfer-Encoding", - "User-Agent" + "User-Agent", ]; const defaultAllowedQueryParameters = ["api-version"]; class Sanitizer { @@ -11617,8 +11749,14 @@ class Sanitizer { } } +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + // Copyright (c) Microsoft Corporation. const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ class RestError extends Error { constructor(message, code, statusCode, request, response) { super(message); @@ -11636,13 +11774,22 @@ class RestError extends Error { return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; } } +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ RestError.PARSE_ERROR = "PARSE_ERROR"; // Copyright (c) Microsoft Corporation. const logger = logger$1.createClientLogger("core-http"); // Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent; +} class ReportTransform extends stream.Transform { constructor(progressCallback) { super(); @@ -11656,7 +11803,44 @@ class ReportTransform extends stream.Transform { callback(undefined); } } -class FetchHttpClient { +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough__namespace.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ async sendRequest(httpRequest) { var _a; if (!httpRequest && typeof httpRequest !== "object") { @@ -11682,7 +11866,7 @@ class FetchHttpClient { } if (httpRequest.formData) { const formData = httpRequest.formData; - const requestForm = new FormData(); + const requestForm = new FormData__default["default"](); const appendFormValue = (key, value) => { // value function probably returns a stream so we can provide a fresh stream on each retry if (typeof value === "function") { @@ -11752,7 +11936,7 @@ class FetchHttpClient { readableStreamBody: streaming ? response.body : undefined, - bodyAsText: !streaming ? await response.text() : undefined + bodyAsText: !streaming ? await response.text() : undefined, }; const onDownloadProgress = httpRequest.onDownloadProgress; if (onDownloadProgress) { @@ -11806,94 +11990,6 @@ class FetchHttpClient { } } } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; -} - -// Copyright (c) Microsoft Corporation. 
-function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {} - } - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; - } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; -} -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel.httpOverHttps(tunnelOptions); - } - else { - return tunnel.httpOverHttp(tunnelOptions); - } -} -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -class NodeFetchHttpClient extends FetchHttpClient { - constructor() { - super(...arguments); - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); - } getOrCreateAgent(httpRequest) { var _a; const isHttps = isUrlHttps(httpRequest.url); @@ -11925,24 +12021,30 @@ class NodeFetchHttpClient extends FetchHttpClient { return agent; } const agentOptions = { - keepAlive: httpRequest.keepAlive + keepAlive: httpRequest.keepAlive, }; if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); } else { - agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); } return agent; } else { - return isHttps ? https.globalAgent : http.globalAgent; + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; } } + /** + * Uses `node-fetch` to perform the request. + */ // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs async fetch(input, init) { - return node_fetch(input, init); + return node_fetch__default["default"](input, init); } + /** + * Prepares a request based on the provided web resource. 
+ */ async prepareRequest(httpRequest) { const requestInit = {}; if (this.cookieJar && !httpRequest.headers.get("Cookie")) { @@ -11963,6 +12065,9 @@ class NodeFetchHttpClient extends FetchHttpClient { requestInit.compress = httpRequest.decompressResponse; return requestInit; } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ async processRequest(operationResponse) { if (this.cookieJar) { const setCookieHeader = operationResponse.headers.get("Set-Cookie"); @@ -11983,6 +12088,11 @@ class NodeFetchHttpClient extends FetchHttpClient { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; (function (HttpPipelineLogLevel) { /** * A log level that indicates that no logs will be logged. @@ -12002,6 +12112,7 @@ class NodeFetchHttpClient extends FetchHttpClient { HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); +// Copyright (c) Microsoft Corporation. /** * Converts an OperationOptions to a RequestOptionsBase * @@ -12023,8 +12134,22 @@ function operationOptionsToRequestOptionsBase(opts) { } // Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ class BaseRequestPolicy { - constructor(_nextPolicy, _options) { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { this._nextPolicy = _nextPolicy; this._options = _options; } @@ -12076,113 +12201,6 @@ class RequestPolicyOptions { } } -// Copyright (c) Microsoft Corporation. -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - } - }; -} -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. 
- * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * Gets the list of status codes for streaming responses. - * @internal - */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - // Copyright (c) Microsoft Corporation. // Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed // by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 @@ -12215,18 +12233,18 @@ const xml2jsDefaultOptionsV2 = { xmldec: { version: "1.0", encoding: "UTF-8", - standalone: true + standalone: true, }, doctype: undefined, renderOpts: { pretty: true, indent: " ", - newline: "\n" + newline: "\n", }, headless: false, chunkSize: 10000, emptyTag: "", - cdata: false + cdata: false, }; // The xml2js settings for general XML parsing operations. const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); @@ -12235,7 +12253,7 @@ xml2jsParserSettings.explicitArray = false; const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsBuilderSettings.explicitArray = false; xml2jsBuilderSettings.renderOpts = { - pretty: false + pretty: false, }; /** * Converts given JSON object to XML string @@ -12246,7 +12264,7 @@ function stringifyXML(obj, opts = {}) { var _a; xml2jsBuilderSettings.rootName = opts.rootName; xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); + const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); return builder.buildObject(obj); } /** @@ -12258,7 +12276,7 @@ function parseXML(str, opts = {}) { var _a; xml2jsParserSettings.explicitRoot = !!opts.includeRoot; xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); + const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); return new Promise((resolve, reject) => { if (!str) { reject(new Error("Document is empty")); @@ -12285,7 +12303,7 @@ function deserializationPolicy(deserializationContentTypes, parsingOptions) { return { create: (nextPolicy, options) => { return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - } + }, }; } const defaultJsonContentTypes = ["application/json", "text/json"]; @@ -12293,8 +12311,8 @@ const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; const DefaultDeserializationOptions = { expectedContentTypes: { json: defaultJsonContentTypes, - xml: defaultXmlContentTypes - } + xml: defaultXmlContentTypes, + }, }; /** * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the @@ -12312,7 +12330,7 @@ class DeserializationPolicy extends BaseRequestPolicy { } async sendRequest(request) { return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey + xmlCharKey: this.xmlCharKey, })); } } @@ -12345,12 +12363,20 @@ function shouldDeserializeResponse(parsedResponse) { } return result; } +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { if (!shouldDeserializeResponse(parsedResponse)) { @@ -12501,6 +12527,113 @@ function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { return Promise.resolve(operationResponse); } +// Copyright (c) Microsoft Corporation. +/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. 
+ * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} + // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. const DEFAULT_CLIENT_RETRY_COUNT = 3; @@ -12564,7 +12697,7 @@ function isDefined(thing) { } // Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The operation was aborted."; +const StandardAbortMessage$1 = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. * @param delayInMs - The number of milliseconds to be delayed. 
@@ -12579,7 +12712,7 @@ function delay(delayInMs, value, options) { let timer = undefined; let onAborted = undefined; const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1)); }; const removeListeners = () => { if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { @@ -12607,20 +12740,34 @@ function delay(delayInMs, value, options) { } // Copyright (c) Microsoft Corporation. +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - } + }, }; } +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +exports.RetryMode = void 0; (function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; })(exports.RetryMode || (exports.RetryMode = {})); const DefaultRetryOptions = { maxRetries: DEFAULT_CLIENT_RETRY_COUNT, retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, }; /** * Instantiates a new "ExponentialRetryPolicyFilter" instance. @@ -12645,11 +12792,11 @@ class ExponentialRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); } } -async function retry(policy, request, response, retryData, requestError) { +async function retry$1(policy, request, response, retryData, requestError) { function shouldPolicyRetry(responseParam) { const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; if (statusCode === 503 && (response === null || response === void 0 ? 
void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { @@ -12666,7 +12813,7 @@ async function retry(policy, request, response, retryData, requestError) { retryData = updateRetryData({ retryInterval: policy.retryInterval, minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval + maxRetryInterval: policy.maxRetryInterval, }, retryData, requestError); const isAborted = request.abortSignal && request.abortSignal.aborted; if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { @@ -12674,10 +12821,10 @@ async function retry(policy, request, response, retryData, requestError) { try { await delay(retryData.retryInterval); const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); + return retry$1(policy, request, res, retryData); } catch (err) { - return retry(policy, request, response, retryData, err); + return retry$1(policy, request, response, retryData, err); } } else if (isAborted || requestError || !response) { @@ -12692,11 +12839,467 @@ async function retry(policy, request, response, retryData, requestError) { } // Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. 
+ */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? 
getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. 
+ // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { return { create: (nextPolicy, options) => { return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); - } + }, }; } class GenerateClientRequestIdPolicy extends BaseRequestPolicy { @@ -12713,130 +13316,190 @@ class GenerateClientRequestIdPolicy extends BaseRequestPolicy { } // Copyright (c) Microsoft Corporation. 
-function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version - }; - const osInfo = { - key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})` - }; - return [runtimeInfo, osInfo]; +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; } // Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; +function ndJsonPolicy() { return { create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - } + return new NdJsonPolicy(nextPolicy, options); + }, }; } -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); } - sendRequest(request) { - this.addUserAgentHeader(request); + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } return this._nextPolicy.sendRequest(request); } - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } } // Copyright (c) Microsoft Corporation. /** - * Methods that are allowed to follow redirects 301 and 302 + * Stores the patterns specified in NO_PROXY environment variable. 
+ * @internal */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20 -}; -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, }; } -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. 
+ * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { super(nextPolicy, options); - this.maxRetries = maxRetries; + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; } sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); + return this._nextPolicy.sendRequest(request); } - return Promise.resolve(response); } // Copyright (c) Microsoft Corporation. @@ -12844,7 +13507,7 @@ function rpRegistrationPolicy(retryTimeout = 30) { return { create: (nextPolicy, options) => { return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - } + }, }; } class RPRegistrationPolicy extends BaseRequestPolicy { @@ -12989,193 +13652,52 @@ async function getRegistrationStatus(policy, url, originalRequest) { } // Copyright (c) Microsoft Corporation. 
-// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry -}; /** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). 
- */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - } - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext - } - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } +function signingPolicy(authenticationProvider) { return { create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - } + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, }; } +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} // Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - } + }, }; } /** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". * @param retryCount - The client retry count. * @param retryInterval - The client retry interval, in milliseconds. * @param minRetryInterval - The minimum retry interval, in milliseconds. @@ -13196,10 +13718,10 @@ class SystemErrorRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .catch((error) => retry$1(this, request, error.response, error)); + .catch((error) => retry(this, request, error.response, error)); } } -async function retry$1(policy, request, operationResponse, err, retryData) { +async function retry(policy, request, operationResponse, err, retryData) { retryData = updateRetryData(policy, retryData, err); function shouldPolicyRetry(_response, error) { if (error && @@ -13220,7 +13742,7 @@ async function retry$1(policy, request, operationResponse, err, retryData) { return policy._nextPolicy.sendRequest(request.clone()); } catch (nestedErr) { - return retry$1(policy, request, operationResponse, nestedErr, retryData); + return retry(policy, request, operationResponse, nestedErr, retryData); } } else { @@ -13232,155 +13754,6 @@ async function retry$1(policy, request, operationResponse, err, retryData) { } } -// Copyright (c) Microsoft Corporation. -(function (QueryCollectionFormat) { - QueryCollectionFormat["Csv"] = ","; - QueryCollectionFormat["Ssv"] = " "; - QueryCollectionFormat["Tsv"] = "\t"; - QueryCollectionFormat["Pipes"] = "|"; - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. 
*/ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - } - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? 
schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -13390,15 +13763,28 @@ const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; // Copyright (c) Microsoft Corporation. const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ function throttlingRetryPolicy() { return { create: (nextPolicy, options) => { return new ThrottlingRetryPolicy(nextPolicy, options); - } + }, }; } -const StandardAbortMessage$1 = "The operation was aborted."; +const StandardAbortMessage = "The operation was aborted."; /** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * * To learn more, please refer to * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and @@ -13429,10 +13815,10 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { this.numberOfRetries += 1; await delay(delayInMs, undefined, { abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage$1 + abortErrorMsg: StandardAbortMessage, }); if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage$1); + throw new abortController.AbortError(StandardAbortMessage); } if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { return this.sendRequest(httpRequest); @@ -13466,77 +13852,26 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. 
-function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - } - }; -} -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} - -// Copyright (c) Microsoft Corporation. -const DefaultKeepAliveOptions = { - enable: true -}; -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - } - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. const createSpan = coreTracing.createSpanFunction({ packagePrefix: "", - namespace: "" + namespace: "", }); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ function tracingPolicy(tracingOptions = {}) { return { create(nextPolicy, options) { return new TracingPolicy(nextPolicy, options, tracingOptions); - } + }, }; } +/** + * A policy that wraps outgoing requests with a tracing span. + */ class TracingPolicy extends BaseRequestPolicy { constructor(nextPolicy, options, tracingOptions) { super(nextPolicy, options); @@ -13563,14 +13898,13 @@ class TracingPolicy extends BaseRequestPolicy { tryCreateSpan(request) { var _a; try { - const path = URLBuilder.parse(request.url).getPath() || "/"; // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(path, { + const { span } = createSpan(`HTTP ${request.method}`, { tracingOptions: { spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext - } + tracingContext: request.tracingContext, + }, }); // If the span is not recording, don't do any more work. 
if (!span.isRecording()) { @@ -13584,7 +13918,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttributes({ "http.method": request.method, "http.url": request.url, - requestId: request.requestId + requestId: request.requestId, }); if (this.userAgent) { span.setAttribute("http.user_agent", this.userAgent); @@ -13611,7 +13945,7 @@ class TracingPolicy extends BaseRequestPolicy { try { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: err.message + message: err.message, }); if (err.statusCode) { span.setAttribute("http.status_code", err.statusCode); @@ -13630,7 +13964,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttribute("serviceRequestId", serviceRequestId); } span.setStatus({ - code: coreTracing.SpanStatusCode.OK + code: coreTracing.SpanStatusCode.OK, }); span.end(); } @@ -13640,88 +13974,6 @@ class TracingPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. - */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - } - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); - } - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} - // Copyright (c) Microsoft Corporation. /** * ServiceClient sends service requests and receives responses. 
@@ -13771,7 +14023,7 @@ class ServiceClient { bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); } return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - } + }, }; }; authPolicyFactory = wrappedPolicyFactory(); @@ -14006,7 +14258,7 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op const updatedOptions = { rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, }; const xmlCharKey = serializerOptions.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { @@ -14025,13 +14277,13 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op if (typeName === MapperType.Sequence) { httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } else if (!isStream) { httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } } @@ -14115,6 +14367,12 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(logPolicy({ logger: logger.info })); return factories; } +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { const requestPolicyFactories = []; if (pipelineOptions.sendStreamingJson) { @@ -14153,7 +14411,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { } return { httpClient: pipelineOptions.httpClient, - requestPolicyFactories + requestPolicyFactories, }; } function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { @@ -14229,12 +14487,18 @@ function getPropertyFromParameterPath(parent, parameterPath) { } return result; } +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ function flattenResponse(_response, responseSpec) { const parsedHeaders = _response.parsedHeaders; const bodyMapper = responseSpec && responseSpec.bodyMapper; const addOperationResponse = (obj) => { return Object.defineProperty(obj, "_response", { - value: _response + value: _response, }); }; if (bodyMapper) { @@ -14320,9 +14584,16 @@ class ExpiringAccessTokenCache { this.cachedToken = undefined; this.tokenRefreshBufferMs = tokenRefreshBufferMs; } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ setCachedToken(accessToken) { this.cachedToken = accessToken; } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ getCachedToken() { if (this.cachedToken && Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { @@ -14381,6 +14652,9 @@ class AccessTokenRefresher { // Copyright (c) Microsoft Corporation. const HeaderConstants = Constants.HeaderConstants; const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ class BasicAuthenticationCredentials { /** * Creates a new BasicAuthenticationCredentials object. @@ -14390,6 +14664,10 @@ class BasicAuthenticationCredentials { * @param authorizationScheme - The authorization scheme. */ constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { throw new Error("userName cannot be null or undefined and must be of type string."); @@ -14469,6 +14747,9 @@ class ApiKeyCredentials { } // Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ class TopicCredentials extends ApiKeyCredentials { /** * Creates a new EventGrid TopicCredentials object. @@ -14481,8 +14762,8 @@ class TopicCredentials extends ApiKeyCredentials { } const options = { inHeader: { - "aeg-sas-key": topicKey - } + "aeg-sas-key": topicKey, + }, }; super(options); } @@ -14490,9 +14771,7 @@ class TopicCredentials extends ApiKeyCredentials { Object.defineProperty(exports, 'isTokenCredential', { enumerable: true, - get: function () { - return coreAuth.isTokenCredential; - } + get: function () { return coreAuth.isTokenCredential; } }); exports.AccessTokenRefresher = AccessTokenRefresher; exports.ApiKeyCredentials = ApiKeyCredentials; @@ -14900,56 +15179,11 @@ var SamplingDecision; /***/ }), /* 341 */, /* 342 */, -/* 343 */ -/***/ (function(module) { - -module.exports = require("timers"); - -/***/ }), +/* 343 */, /* 344 */, /* 345 */, /* 346 */, -/* 347 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Generated by CoffeeScript 1.12.7 -(function() { - var XMLStringWriter, XMLWriterBase, - extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, - hasProp = {}.hasOwnProperty; - - XMLWriterBase = __webpack_require__(423); - - module.exports = XMLStringWriter = (function(superClass) { - extend(XMLStringWriter, superClass); - - function XMLStringWriter(options) { - XMLStringWriter.__super__.constructor.call(this, options); - } - - XMLStringWriter.prototype.document = function(doc, options) { - var child, i, len, r, ref; - options = this.filterOptions(options); - r = ''; - ref = doc.children; - for (i = 0, len = ref.length; i < len; i++) { - child = ref[i]; - r += this.writeChildNode(child, options, 0); - } - if (options.pretty && r.slice(-options.newline.length) === options.newline) { - r = r.slice(0, -options.newline.length); - } - return r; - }; - - return XMLStringWriter; - - })(XMLWriterBase); - -}).call(this); - - -/***/ }), +/* 347 */, /* 348 */ /***/ (function(__unusedmodule, exports) { @@ -15080,7 
+15314,7 @@ module.exports = require("assert"); "use strict"; -var punycode = __webpack_require__(213); +var punycode = __webpack_require__(815); var mappingTable = __webpack_require__(482); var PROCESSING_OPTIONS = { @@ -15379,7 +15613,7 @@ function logProxy(funcName, namespace, args) { Object.defineProperty(exports, '__esModule', { value: true }); var coreHttp = __webpack_require__(330); -var tslib = __webpack_require__(815); +var tslib = __webpack_require__(608); var coreTracing = __webpack_require__(263); var logger$1 = __webpack_require__(492); var abortController = __webpack_require__(106); @@ -15392,6 +15626,29 @@ var events = __webpack_require__(614); var fs = __webpack_require__(747); var util = __webpack_require__(669); +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. @@ -16346,10 +16603,10 @@ const BlobItemInternal = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } }, deleted: { @@ -16424,6 +16681,30 @@ const BlobItemInternal = { } } }; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: "String" + } + } + } + } +}; const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", @@ -16867,10 +17148,10 @@ const BlobPrefix = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } } } @@ -18493,6 +18774,59 @@ const ContainerSubmitBatchExceptionHeaders = { } } }; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; const ContainerAcquireLeaseHeaders = { serializedName: 
"Container_acquireLeaseHeaders", type: { @@ -21001,6 +21335,13 @@ const BlobCopyFromURLHeaders = { name: "ByteArray" } }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -23518,6 +23859,7 @@ var Mappers = /*#__PURE__*/Object.freeze({ ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, BlobFlatListSegment: BlobFlatListSegment, BlobItemInternal: BlobItemInternal, + BlobName: BlobName, BlobPropertiesInternal: BlobPropertiesInternal, ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, BlobHierarchyListSegment: BlobHierarchyListSegment, @@ -23569,6 +23911,8 @@ var Mappers = /*#__PURE__*/Object.freeze({ ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, @@ -23756,7 +24100,7 @@ const timeoutInSeconds = { const version = { parameterPath: "version", mapper: { - defaultValue: "2020-10-02", + defaultValue: "2021-04-10", isConstant: true, serializedName: "x-ms-version", type: { @@ -23851,7 +24195,7 @@ const include = { element: { type: { name: "Enum", - allowedValues: ["metadata", "deleted"] + allowedValues: ["metadata", "deleted", "system"] } } } @@ -24373,11 +24717,10 @@ const encryptionKeySha256 = { } }; const encryptionAlgorithm = { - parameterPath: ["options", "encryptionAlgorithm"], + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], mapper: { - defaultValue: "AES256", - isConstant: true, serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", type: { name: "String" } @@ -25294,7 +25637,7 @@ class Service { setProperties(blobServiceProperties, options) { const operationArguments = { blobServiceProperties, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); } @@ -25305,9 +25648,9 @@ class Service { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. 
It is only available on the @@ -25317,7 +25660,7 @@ class Service { */ getStatistics(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); } @@ -25327,7 +25670,7 @@ class Service { */ listContainersSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); } @@ -25340,7 +25683,7 @@ class Service { getUserDelegationKey(keyInfo, options) { const operationArguments = { keyInfo, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); } @@ -25350,9 +25693,9 @@ class Service { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. @@ -25367,9 +25710,9 @@ class Service { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -25379,13 +25722,13 @@ class Service { */ filterBlobs(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", @@ -25414,9 +25757,9 @@ const setPropertiesOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getPropertiesOperationSpec = { +const getPropertiesOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -25441,7 +25784,7 @@ const getPropertiesOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getStatisticsOperationSpec = { path: "/", @@ -25468,7 +25811,7 @@ const getStatisticsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const 
listContainersSegmentOperationSpec = { path: "/", @@ -25498,7 +25841,7 @@ const listContainersSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -25529,9 +25872,9 @@ const getUserDelegationKeyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getAccountInfoOperationSpec = { +const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -25547,9 +25890,9 @@ const getAccountInfoOperationSpec = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const submitBatchOperationSpec = { +const submitBatchOperationSpec$1 = { path: "/", httpMethod: "POST", responses: { @@ -25579,9 +25922,9 @@ const submitBatchOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const filterBlobsOperationSpec = { +const filterBlobsOperationSpec$1 = { path: "/", httpMethod: "GET", responses: { @@ -25608,7 +25951,7 @@ const filterBlobsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; /* @@ -25634,9 +25977,9 @@ class Container { */ create(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -25645,7 +25988,7 @@ class Container { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); } @@ -25656,9 +25999,9 @@ class Container { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. @@ -25666,9 +26009,9 @@ class Container { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. 
The permissions indicate whether container data @@ -25677,7 +26020,7 @@ class Container { */ getAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); } @@ -25688,7 +26031,7 @@ class Container { */ setAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); } @@ -25698,7 +26041,7 @@ class Container { */ restore(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); } @@ -25710,7 +26053,7 @@ class Container { rename(sourceContainerName, options) { const operationArguments = { sourceContainerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec); } @@ -25727,9 +26070,20 @@ class Container { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -25738,9 +26092,9 @@ class Container { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -25751,9 +26105,9 @@ class Container { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can @@ -25764,9 +26118,9 @@ class Container { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -25775,9 +26129,9 @@ class Container { */ breakLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -25792,9 +26146,9 @@ class Container { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -25802,7 +26156,7 @@ class Container { */ listBlobFlatSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); } @@ -25817,7 +26171,7 @@ class Container { listBlobHierarchySegment(delimiter, options) { const operationArguments = { delimiter, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); } @@ -25827,14 +26181,14 @@ class Container { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); } } // Operation Specifications -const xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const createOperationSpec = { +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -25858,7 +26212,7 @@ const createOperationSpec = { preventEncryptionScopeOverride ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", @@ -25881,9 +26235,9 @@ const getPropertiesOperationSpec$1 = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const deleteOperationSpec = { +const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: 
"DELETE", responses: { @@ -25906,9 +26260,9 @@ const deleteOperationSpec = { ifUnmodifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const setMetadataOperationSpec = { +const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -25935,7 +26289,7 @@ const setMetadataOperationSpec = { ifModifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -25974,7 +26328,7 @@ const getAccessPolicyOperationSpec = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const setAccessPolicyOperationSpec = { path: "/{containerName}", @@ -26008,7 +26362,7 @@ const setAccessPolicyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const restoreOperationSpec = { path: "/{containerName}", @@ -26036,7 +26390,7 @@ const restoreOperationSpec = { deletedContainerVersion ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const renameOperationSpec = { path: "/{containerName}", @@ -26064,9 +26418,9 @@ const renameOperationSpec = { sourceLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const submitBatchOperationSpec$1 = { +const submitBatchOperationSpec = { path: "/{containerName}", httpMethod: "POST", responses: { @@ -26100,9 +26454,39 @@ const submitBatchOperationSpec$1 = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const acquireLeaseOperationSpec = { +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26131,9 +26515,9 @@ const acquireLeaseOperationSpec = { proposedLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const releaseLeaseOperationSpec = { +const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26161,9 +26545,9 @@ const releaseLeaseOperationSpec = { leaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const renewLeaseOperationSpec = { +const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26191,9 +26575,9 @@ const renewLeaseOperationSpec = { action2 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const breakLeaseOperationSpec = { +const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26221,9 +26605,9 @@ const breakLeaseOperationSpec = { breakPeriod ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const changeLeaseOperationSpec = { +const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -26252,7 +26636,7 @@ const changeLeaseOperationSpec = { 
proposedLeaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -26283,7 +26667,7 @@ const listBlobFlatSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -26315,7 +26699,7 @@ const listBlobHierarchySegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", @@ -26333,7 +26717,7 @@ const getAccountInfoOperationSpec$1 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; /* @@ -26359,7 +26743,7 @@ class Blob$1 { */ download(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, downloadOperationSpec); } @@ -26370,9 +26754,9 @@ class Blob$1 { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -26391,9 +26775,9 @@ class Blob$1 { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted @@ -26401,7 +26785,7 @@ class Blob$1 { */ undelete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); } @@ -26413,7 +26797,7 @@ class Blob$1 { setExpiry(expiryOptions, options) { const operationArguments = { expiryOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); } @@ -26423,7 +26807,7 @@ class Blob$1 { */ setHttpHeaders(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); } @@ -26433,7 +26817,7 @@ class Blob$1 { */ setImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, 
setImmutabilityPolicyOperationSpec); } @@ -26443,7 +26827,7 @@ class Blob$1 { */ deleteImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); } @@ -26455,7 +26839,7 @@ class Blob$1 { setLegalHold(legalHold, options) { const operationArguments = { legalHold, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); } @@ -26466,9 +26850,9 @@ class Blob$1 { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -26477,9 +26861,9 @@ class Blob$1 { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -26490,9 +26874,9 @@ class Blob$1 { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -26503,9 +26887,9 @@ class Blob$1 { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -26520,9 +26904,9 @@ class Blob$1 { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -26531,9 +26915,9 @@ class Blob$1 { */ breakLease(options) { const 
operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob @@ -26541,7 +26925,7 @@ class Blob$1 { */ createSnapshot(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); } @@ -26556,7 +26940,7 @@ class Blob$1 { startCopyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); } @@ -26572,7 +26956,7 @@ class Blob$1 { copyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); } @@ -26586,7 +26970,7 @@ class Blob$1 { abortCopyFromURL(copyId, options) { const operationArguments = { copyId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); } @@ -26602,7 +26986,7 @@ class Blob$1 { setTier(tier, options) { const operationArguments = { tier, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); } @@ -26612,9 +26996,9 @@ class Blob$1 { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -26623,7 +27007,7 @@ class Blob$1 { */ query(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, queryOperationSpec); } @@ -26633,7 +27017,7 @@ class Blob$1 { */ getTags(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); } @@ -26643,13 +27027,13 @@ class Blob$1 { */ setTags(options) { const operationArguments = { - options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -26697,9 +27081,9 @@ const downloadOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getPropertiesOperationSpec$2 = { +const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { @@ -26732,9 +27116,9 @@ const getPropertiesOperationSpec$2 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const deleteOperationSpec$1 = { +const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { @@ -26766,7 +27150,7 @@ const deleteOperationSpec$1 = { deleteSnapshots ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", @@ -26788,7 +27172,7 @@ const undeleteOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", @@ -26812,7 +27196,7 @@ const setExpiryOperationSpec = { expiresOn ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", @@ -26846,7 +27230,7 @@ const setHttpHeadersOperationSpec = { blobContentDisposition ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -26871,7 +27255,7 @@ const setImmutabilityPolicyOperationSpec = { immutabilityPolicyMode ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -26893,7 +27277,7 @@ const deleteImmutabilityPolicyOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", @@ -26916,9 +27300,9 @@ const setLegalHoldOperationSpec = { legalHold ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const setMetadataOperationSpec$1 = { +const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -26949,9 +27333,9 @@ const setMetadataOperationSpec$1 = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const acquireLeaseOperationSpec$1 = { +const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -26979,9 +27363,9 @@ const acquireLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const releaseLeaseOperationSpec$1 = { +const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27008,9 +27392,9 @@ const releaseLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const renewLeaseOperationSpec$1 = { +const renewLeaseOperationSpec = { path: 
"/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27037,9 +27421,9 @@ const renewLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const changeLeaseOperationSpec$1 = { +const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27067,9 +27451,9 @@ const changeLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const breakLeaseOperationSpec$1 = { +const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -27096,7 +27480,7 @@ const breakLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", @@ -27129,7 +27513,7 @@ const createSnapshotOperationSpec = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -27171,7 +27555,7 @@ const startCopyFromURLOperationSpec = { legalHold1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -27200,6 +27584,7 @@ const copyFromURLOperationSpec = { ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, + encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, @@ -27213,7 +27598,7 @@ const copyFromURLOperationSpec = { copySourceAuthorization ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -27241,7 +27626,7 @@ const abortCopyFromURLOperationSpec = { copyActionAbortConstant ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTierOperationSpec = { path: "/{containerName}/{blob}", @@ -27275,9 +27660,9 @@ const setTierOperationSpec = { tier1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getAccountInfoOperationSpec$2 = { +const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -27293,7 +27678,7 @@ const getAccountInfoOperationSpec$2 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -27343,7 +27728,7 @@ const queryOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -27373,7 +27758,7 @@ const getTagsOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -27407,7 +27792,7 @@ const setTagsOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; /* @@ -27437,7 +27822,7 @@ class PageBlob { const operationArguments = { contentLength, blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); } @@ -27451,7 +27836,7 @@ class PageBlob { 
const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); } @@ -27463,7 +27848,7 @@ class PageBlob { clearPages(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); } @@ -27484,7 +27869,7 @@ class PageBlob { sourceRange, contentLength, range, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); } @@ -27495,7 +27880,7 @@ class PageBlob { */ getPageRanges(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); } @@ -27506,7 +27891,7 @@ class PageBlob { */ getPageRangesDiff(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); } @@ -27519,7 +27904,7 @@ class PageBlob { resize(blobContentLength, options) { const operationArguments = { blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); } @@ -27533,7 +27918,7 @@ class PageBlob { updateSequenceNumber(sequenceNumberAction, options) { const operationArguments = { sequenceNumberAction, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); } @@ -27552,14 +27937,14 @@ class PageBlob { copyIncremental(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -27606,7 +27991,7 @@ const createOperationSpec$1 = { blobSequenceNumber ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -27648,7 +28033,7 @@ const 
uploadPagesOperationSpec = { ifSequenceNumberEqualTo ], mediaType: "binary", - serializer + serializer: serializer$2 }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -27686,7 +28071,7 @@ const clearPagesOperationSpec = { pageWrite1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -27733,7 +28118,7 @@ const uploadPagesFromURLOperationSpec = { range1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", @@ -27767,7 +28152,7 @@ const getPageRangesOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", @@ -27803,7 +28188,7 @@ const getPageRangesDiffOperationSpec = { prevSnapshotUrl ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const resizeOperationSpec = { path: "/{containerName}/{blob}", @@ -27836,7 +28221,7 @@ const resizeOperationSpec = { blobContentLength ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", @@ -27866,7 +28251,7 @@ const updateSequenceNumberOperationSpec = { sequenceNumberAction ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", @@ -27894,7 +28279,7 @@ const copyIncrementalOperationSpec = { copySource ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; /* @@ -27921,9 +28306,9 @@ class AppendBlob { create(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. 
The @@ -27937,7 +28322,7 @@ class AppendBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); } @@ -27954,7 +28339,7 @@ class AppendBlob { const operationArguments = { sourceUrl, contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); } @@ -27965,15 +28350,15 @@ class AppendBlob { */ seal(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, sealOperationSpec); } } // Operation Specifications -const xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$2 = { +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -28016,7 +28401,7 @@ const createOperationSpec$2 = { blobType1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -28100,7 +28485,7 @@ const appendBlockFromUrlOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const sealOperationSpec = { path: "/{containerName}/{blob}", @@ -28128,7 +28513,7 @@ const sealOperationSpec = { appendPosition ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; /* @@ -28160,7 +28545,7 @@ class BlockBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadOperationSpec); } @@ -28181,7 +28566,7 @@ class BlockBlob { const operationArguments = { contentLength, copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); } @@ -28199,7 +28584,7 @@ class BlockBlob { blockId, contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); } @@ -28218,7 +28603,7 @@ class BlockBlob { blockId, contentLength, sourceUrl, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); } @@ -28236,7 +28621,7 @@ class BlockBlob { commitBlockList(blocks, options) { const operationArguments = { blocks, - options: 
coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); } @@ -28250,14 +28635,14 @@ class BlockBlob { getBlockList(listType, options) { const operationArguments = { listType, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -28305,7 +28690,7 @@ const uploadOperationSpec = { blobType2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", @@ -28358,7 +28743,7 @@ const putBlobFromUrlOperationSpec = { copySourceBlobProperties ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -28394,7 +28779,7 @@ const stageBlockOperationSpec = { accept2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -28435,7 +28820,7 @@ const stageBlockFromURLOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -28485,7 +28870,7 @@ const commitBlockListOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -28515,7 +28900,7 @@ const getBlockListOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; // Copyright (c) Microsoft Corporation. @@ -28526,8 +28911,8 @@ const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
-const SDK_VERSION = "12.8.0"; -const SERVICE_VERSION = "2020-10-02"; +const SDK_VERSION = "12.9.0"; +const SERVICE_VERSION = "2021-04-10"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB const BLOCK_BLOB_MAX_BLOCKS = 50000; @@ -28544,15 +28929,15 @@ const URLConstants = { SIGNATURE: "sig", SNAPSHOT: "snapshot", VERSIONID: "versionid", - TIMEOUT: "timeout" - } + TIMEOUT: "timeout", + }, }; const HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416 + HTTP_RANGE_NOT_SATISFIABLE: 416, }; const HeaderConstants = { AUTHORIZATION: "Authorization", @@ -28577,7 +28962,7 @@ const HeaderConstants = { X_MS_COPY_SOURCE: "x-ms-copy-source", X_MS_DATE: "x-ms-date", X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" + X_MS_VERSION: "x-ms-version", }; const ETagNone = ""; const ETagAny = "*"; @@ -28682,7 +29067,7 @@ const StorageBlobLoggingAllowedHeaderNames = [ "x-ms-tag-count", "x-ms-encryption-key-sha256", "x-ms-if-tags", - "x-ms-source-if-tags" + "x-ms-source-if-tags", ]; const StorageBlobLoggingAllowedQueryParameters = [ "comp", @@ -28717,8 +29102,9 @@ const StorageBlobLoggingAllowedQueryParameters = [ "skt", "sktid", "skv", - "snapshot" + "snapshot", ]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; // Copyright (c) Microsoft Corporation. /** @@ -28858,7 +29244,7 @@ function extractConnectionStringParts(connectionString) { url: blobEndpoint, accountName, accountKey, - proxyUri + proxyUri, }; } else { @@ -29190,14 +29576,14 @@ function toBlobTags(tags) { return undefined; } const res = { - blobTagSet: [] + blobTagSet: [], }; for (const key in tags) { if (Object.prototype.hasOwnProperty.call(tags, key)) { const value = tags[key]; res.blobTagSet.push({ key, - value + value, }); } } @@ -29237,33 +29623,33 @@ function toQuerySerialization(textConfiguration) { fieldQuote: textConfiguration.fieldQuote || "", recordSeparator: textConfiguration.recordSeparator, escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } + headersPresent: textConfiguration.hasHeaders || false, + }, + }, }; case "json": return { format: { type: "json", jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } + recordSeparator: textConfiguration.recordSeparator, + }, + }, }; case "arrow": return { format: { type: "arrow", arrowConfiguration: { - schema: textConfiguration.schema - } - } + schema: textConfiguration.schema, + }, + }, }; case "parquet": return { format: { - type: "parquet" - } + type: "parquet", + }, }; default: throw Error("Invalid BlobQueryTextConfiguration."); @@ -29287,7 +29673,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { } const rule = { ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] + replicationStatus: objectReplicationRecord[key], }; const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); if (policyIndex > -1) { @@ -29296,7 +29682,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { else { orProperties.push({ policyId: ids[0], - rules: [rule] + rules: [rule], }); } } @@ -29315,6 +29701,202 @@ function attachCredential(thing, credential) { function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? 
httpAuthorization.scheme + " " + httpAuthorization.value : undefined; } +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobItem(blobInXML) { + const blobPropertiesInXML = blobInXML["Properties"]; + const blobProperties = { + createdOn: new Date(blobPropertiesInXML["Creation-Time"]), + lastModified: new Date(blobPropertiesInXML["Last-Modified"]), + etag: blobPropertiesInXML["Etag"], + contentLength: blobPropertiesInXML["Content-Length"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["Content-Length"]), + contentType: blobPropertiesInXML["Content-Type"], + contentEncoding: blobPropertiesInXML["Content-Encoding"], + contentLanguage: blobPropertiesInXML["Content-Language"], + contentMD5: decodeBase64String(blobPropertiesInXML["Content-MD5"]), + contentDisposition: blobPropertiesInXML["Content-Disposition"], + cacheControl: blobPropertiesInXML["Cache-Control"], + blobSequenceNumber: blobPropertiesInXML["x-ms-blob-sequence-number"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]), + blobType: blobPropertiesInXML["BlobType"], + leaseStatus: blobPropertiesInXML["LeaseStatus"], + leaseState: blobPropertiesInXML["LeaseState"], + leaseDuration: blobPropertiesInXML["LeaseDuration"], + copyId: blobPropertiesInXML["CopyId"], + copyStatus: blobPropertiesInXML["CopyStatus"], + copySource: blobPropertiesInXML["CopySource"], + copyProgress: blobPropertiesInXML["CopyProgress"], + copyCompletedOn: blobPropertiesInXML["CopyCompletionTime"] === undefined + ? 
undefined + : new Date(blobPropertiesInXML["CopyCompletionTime"]), + copyStatusDescription: blobPropertiesInXML["CopyStatusDescription"], + serverEncrypted: ParseBoolean(blobPropertiesInXML["ServerEncrypted"]), + incrementalCopy: ParseBoolean(blobPropertiesInXML["IncrementalCopy"]), + destinationSnapshot: blobPropertiesInXML["DestinationSnapshot"], + deletedOn: blobPropertiesInXML["DeletedTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["DeletedTime"]), + remainingRetentionDays: blobPropertiesInXML["RemainingRetentionDays"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["RemainingRetentionDays"]), + accessTier: blobPropertiesInXML["AccessTier"], + accessTierInferred: ParseBoolean(blobPropertiesInXML["AccessTierInferred"]), + archiveStatus: blobPropertiesInXML["ArchiveStatus"], + customerProvidedKeySha256: blobPropertiesInXML["CustomerProvidedKeySha256"], + encryptionScope: blobPropertiesInXML["EncryptionScope"], + accessTierChangedOn: blobPropertiesInXML["AccessTierChangeTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["AccessTierChangeTime"]), + tagCount: blobPropertiesInXML["TagCount"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["TagCount"]), + expiresOn: blobPropertiesInXML["Expiry-Time"] === undefined + ? undefined + : new Date(blobPropertiesInXML["Expiry-Time"]), + isSealed: ParseBoolean(blobPropertiesInXML["Sealed"]), + rehydratePriority: blobPropertiesInXML["RehydratePriority"], + lastAccessedOn: blobPropertiesInXML["LastAccessTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["LastAccessTime"]), + immutabilityPolicyExpiresOn: blobPropertiesInXML["ImmutabilityPolicyUntilDate"] === undefined + ? undefined + : new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]), + immutabilityPolicyMode: blobPropertiesInXML["ImmutabilityPolicyMode"], + legalHold: ParseBoolean(blobPropertiesInXML["LegalHold"]), + }; + return { + name: ParseBlobName(blobInXML["Name"]), + deleted: ParseBoolean(blobInXML["Deleted"]), + snapshot: blobInXML["Snapshot"], + versionId: blobInXML["VersionId"], + isCurrentVersion: ParseBoolean(blobInXML["IsCurrentVersion"]), + properties: blobProperties, + metadata: blobInXML["Metadata"], + blobTags: ParseBlobTags(blobInXML["Tags"]), + objectReplicationMetadata: blobInXML["OrMetadata"], + hasVersionsOnly: ParseBoolean(blobInXML["HasVersionsOnly"]), + }; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML instanceof 
Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} // Copyright (c) Microsoft Corporation. /** @@ -29345,9 +29927,16 @@ class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { return this._nextPolicy.sendRequest(request); } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } } @@ -29368,6 +29957,10 @@ class StorageBrowserPolicyFactory { } // Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; (function (StorageRetryPolicyType) { /** * Exponential retry. Retry time delay grows exponentially. @@ -29385,7 +29978,7 @@ const DEFAULT_RETRY_OPTIONS = { retryDelayInMs: 4 * 1000, retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", - tryTimeoutInMs: undefined // Use server side default timeout strategy + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** @@ -29422,7 +30015,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, }; } /** @@ -29499,7 +30092,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { "ENOTFOUND", "TIMEOUT", "EPIPE", - "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js ]; if (err) { for (const retriableError of retriableErrors) { @@ -29685,7 +30278,7 @@ class TelemetryPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { if (!request.headers) { request.headers = new coreHttp.HttpHeaders(); } @@ -29708,7 +30301,7 @@ class TelemetryPolicyFactory { */ constructor(telemetry) { const userAgentInfo = []; - { + if (coreHttp.isNode) { if (telemetry) { const telemetryString = telemetry.userAgentPrefix || ""; if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { @@ -29721,7 +30314,7 @@ class TelemetryPolicyFactory { userAgentInfo.push(libInfo); } // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - const runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + const runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; if (userAgentInfo.indexOf(runtimeInfo) === -1) { userAgentInfo.push(runtimeInfo); } @@ -29745,6 +30338,247 @@ function getCachedDefaultHttpClient() { return _defaultHttpClient; } +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. 
+ */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + // Copyright (c) Microsoft Corporation. /** * A helper to decide if a given argument satisfies the Pipeline contract @@ -29790,7 +30624,7 @@ class Pipeline { toServiceClientOptions() { return { httpClient: this.options.httpClient, - requestPolicyFactories: this.factories + requestPolicyFactories: this.factories, }; } } @@ -29802,6 +30636,7 @@ class Pipeline { * @returns A new Pipeline object. */ function newPipeline(credential, pipelineOptions = {}) { + var _a; if (credential === undefined) { credential = new AnonymousCredential(); } @@ -29823,16 +30658,16 @@ function newPipeline(credential, pipelineOptions = {}) { coreHttp.logPolicy({ logger: logger.info, allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters - }) + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), ]; - { + if (coreHttp.isNode) { // policies only available in Node.js runtime, not in browsers factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); factories.push(coreHttp.disableResponseDecompressionPolicy()); } factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) : credential); return new Pipeline(factories, pipelineOptions); } @@ -29859,7 +30694,9 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { */ signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && typeof request.body === "string" && request.body.length > 0) { + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ @@ -29874,7 +30711,7 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) + this.getHeaderValueToSign(request, HeaderConstants.RANGE), ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + @@ -30003,9 +30840,7 @@ class StorageSharedKeyCredential extends Credential { * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } } @@ -30017,8 +30852,8 @@ class StorageSharedKeyCredential extends Credential { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ const packageName = "azure-storage-blob"; -const packageVersion = "12.8.0"; -class StorageClientContext extends coreHttp.ServiceClient { +const packageVersion = "12.9.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { /** * Initializes a new instance of the StorageClientContext class. * @param url The URL of the service account, container, or blob that is the target of the desired @@ -30034,7 +30869,7 @@ class StorageClientContext extends coreHttp.ServiceClient { options = {}; } if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } super(undefined, options); @@ -30043,7 +30878,7 @@ class StorageClientContext extends coreHttp.ServiceClient { // Parameter assignments this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2020-10-02"; + this.version = options.version || "2021-04-10"; } } @@ -30090,7 +30925,7 @@ class StorageClient { */ const createSpan = coreTracing.createSpanFunction({ packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage" + namespace: "Microsoft.Storage", }); /** * @internal @@ -30104,7 +30939,7 @@ function convertTracingToRequestOptionsBase(options) { return { // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext, }; } @@ -30161,6 +30996,10 @@ class BlobSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an @@ -30202,6 +31041,9 @@ class BlobSASPermissions { case "i": blobSASPermissions.setImmutabilityPolicy = true; break; + case "y": + blobSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission: ${char}`); } @@ -30246,6 +31088,9 @@ class BlobSASPermissions { if (permissionLike.setImmutabilityPolicy) { blobSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } return blobSASPermissions; } /** @@ -30286,6 +31131,9 @@ class BlobSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -30345,6 +31193,14 @@ class ContainerSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; } /** * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an @@ -30389,6 +31245,12 @@ class ContainerSASPermissions { case "i": containerSASPermissions.setImmutabilityPolicy = true; break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; default: throw new RangeError(`Invalid permission ${char}`); } @@ -30436,6 +31298,12 @@ class ContainerSASPermissions { if (permissionLike.setImmutabilityPolicy) { containerSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } return containerSASPermissions; } /** @@ -30481,6 +31349,12 @@ class ContainerSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } return permissions.join(""); } } @@ -30510,9 +31384,7 @@ class UserDelegationKeyCredential { */ computeHMACSHA256(stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); - return crypto.createHmac("sha256", this.key) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } } @@ -30530,6 +31402,10 @@ function ipRangeToString(ipRange) { } // Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; (function (SASProtocol) { /** * Protocol that allows HTTPS only @@ -30550,7 +31426,7 @@ function ipRangeToString(ipRange) { * NOTE: Instances of this class are immutable. 
*/ class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { @@ -30563,6 +31439,7 @@ class SASQueryParameters { this.expiresOn = permissionsOrOptions.expiresOn; this.ipRangeInner = permissionsOrOptions.ipRange; this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; this.resource = permissionsOrOptions.resource; this.cacheControl = permissionsOrOptions.cacheControl; this.contentDisposition = permissionsOrOptions.contentDisposition; @@ -30588,6 +31465,7 @@ class SASQueryParameters { this.protocol = protocol; this.startsOn = startsOn; this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; this.identifier = identifier; this.resource = resource; this.cacheControl = cacheControl; @@ -30616,7 +31494,7 @@ class SASQueryParameters { if (this.ipRangeInner) { return { end: this.ipRangeInner.end, - start: this.ipRangeInner.start + start: this.ipRangeInner.start, }; } return undefined; @@ -30635,6 +31513,7 @@ class SASQueryParameters { "se", "sip", "si", + "ses", "skoid", "sktid", "skt", @@ -30650,7 +31529,7 @@ class SASQueryParameters { "rscl", "rsct", "saoid", - "scid" + "scid", ]; const queries = []; for (const param of params) { @@ -30679,6 +31558,9 @@ class SASQueryParameters { case "si": this.tryAppendQueryParameter(queries, param, this.identifier); break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; case "skoid": // Signed object ID this.tryAppendQueryParameter(queries, param, this.signedOid); break; @@ -30763,6 +31645,15 @@ function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredent if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } // Version 2019-12-12 adds support for the blob tags permission. // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string @@ -30844,7 +31735,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? 
blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); @@ -30913,11 +31804,81 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. 
@@ -30989,7 +31950,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); @@ -31068,11 +32029,91 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" @@ -31106,6 +32147,11 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -31116,10 +32162,18 @@ function 
SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } blobSASSignatureValues.version = version; return blobSASSignatureValues; } @@ -31193,7 +32247,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31228,7 +32282,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31261,7 +32315,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31293,7 +32347,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31328,7 +32382,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -31408,8 +32462,7 @@ class RetriableReadableStream extends stream.Readable { }); } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this - .offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); } } else { @@ -32254,7 +33307,7 @@ class AvroReader { } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); if (!arraysEqual(header, AVRO_INIT_BYTES)) { throw new Error("Stream is not an Avro file."); @@ -32262,7 +33315,7 @@ class AvroReader { // File metadata is written as if defined by the following map schema: // { "type": "map", "values": "bytes"} this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Validate codec const codec = this._metadata[AVRO_CODEC_KEY]; @@ -32271,7 +33324,7 @@ class AvroReader { } // The 16-byte, randomly-generated sync marker for this file. 
this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Parse the schema const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); @@ -32280,7 +33333,7 @@ class AvroReader { this._blockOffset = this._initialBlockOffset + this._dataStream.position; } this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // skip block length await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); @@ -32302,13 +33355,13 @@ class AvroReader { } while (this.hasNext()) { const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._itemsRemainingInBlock--; this._objectIndex++; if (this._itemsRemainingInBlock == 0) { const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._objectIndex = 0; @@ -32317,7 +33370,7 @@ class AvroReader { } try { this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); } catch (err) { @@ -32516,7 +33569,7 @@ class BlobQuickQueryStream extends stream.Readable { position, name, isFatal: fatal, - description + description, }); } break; @@ -32892,6 +33945,11 @@ class BlobQueryResponse { } // Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; (function (BlockBlobTier) { /** * Optimized for storing data that is accessed frequently. @@ -32907,6 +33965,12 @@ class BlobQueryResponse { */ BlockBlobTier["Archive"] = "Archive"; })(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; (function (PremiumPageBlobTier) { /** * P4 Tier. @@ -32967,6 +34031,20 @@ function ensureCpkIfSpecified(cpk, isHttps) { cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } } +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
@@ -32979,16 +34057,16 @@ function ensureCpkIfSpecified(cpk, isHttps) { function rangeResponseFromModel(response) { const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); return Object.assign(Object.assign({}, response), { pageRange, clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { pageRange, - clearRange + clearRange, } }) }); } @@ -33001,7 +34079,7 @@ function rangeResponseFromModel(response) { */ class BlobBeginCopyFromUrlPoller extends coreLro.Poller { constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions } = options; + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; @@ -33037,7 +34115,7 @@ const cancel = async function cancel(options = {}) { } // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); @@ -33115,7 +34193,7 @@ function makeBlobBeginCopyFromURLPollOperation(state) { state: Object.assign({}, state), cancel, toString, - update + update, }; } @@ -33757,7 +34835,7 @@ async function streamToBuffer2(stream, buffer, encoding) { */ async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { - const ws = fs.createWriteStream(file); + const ws = fs__namespace.createWriteStream(file); rs.on("error", (err) => { reject(err); }); @@ -33773,8 +34851,8 @@ async function readStreamToLocalFile(rs, file) { * * Promisified version of fs.stat(). 
*/ -const fsStat = util.promisify(fs.stat); -const fsCreateReadStream = fs.createReadStream; +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, @@ -33817,12 +34895,17 @@ class BlobClient extends StorageClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -33839,10 +34922,8 @@ class BlobClient extends StorageClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - ({ - blobName: this._name, - containerName: this._containerName - } = this.getBlobAndContainerNamesFromUrl()); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); this.blobContext = new Blob$1(this.storageClientContext); this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); @@ -33967,11 +35048,13 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-download", options); try { const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately - if (false) {} + if (!coreHttp.isNode) { + return wrappedRes; + } // We support retrying when download stream unexpected ends in Node.js runtime // Following code shouldn't be bundled into browser build, however some // bundlers may try to bundle following code and "FileReadResponse.ts". 
@@ -33996,16 +35079,16 @@ class BlobClient extends StorageClient { ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, }, range: rangeToString({ count: offset + res.contentLength - start, - offset: start + offset: start, }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey + cpkInfo: options.customerProvidedKey, }; // Debug purpose only // console.log( @@ -34016,13 +35099,13 @@ class BlobClient extends StorageClient { return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress + onProgress: options.onProgress, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34047,21 +35130,23 @@ class BlobClient extends StorageClient { abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } catch (e) { if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking blob existence" - }); + // Expected exception when checking blob existence return false; } + else if (e.statusCode === 409 && + e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg) { + // Expected exception when checking blob existence + return true; + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34093,7 +35178,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34120,7 +35205,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34142,20 +35227,19 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." + message: "Expected exception when deleting a blob or snapshot only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34179,7 +35263,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34213,7 +35297,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34243,7 +35327,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34269,7 +35353,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34293,7 +35377,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34327,7 +35411,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34411,7 +35495,7 @@ class BlobClient extends StorageClient { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) + startCopyFromURL: (...args) => this.startCopyFromURL(...args), }; const poller = new BlobBeginCopyFromUrlPoller({ blobClient: client, @@ -34419,7 +35503,7 @@ class BlobClient extends StorageClient { intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + startCopyFromURLOptions: options, }); // Trigger the startCopyFromURL call by calling poll. // Any errors from this method should be surfaced to the user. @@ -34442,7 +35526,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34468,13 +35552,13 @@ class BlobClient extends StorageClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold }, convertTracingToRequestOptionsBase(updatedOptions))); + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34502,7 +35586,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34579,7 +35663,7 @@ class BlobClient extends StorageClient { conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); @@ -34598,7 +35682,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34636,7 +35720,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34717,13 +35801,13 @@ class BlobClient extends StorageClient { sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions + sourceIfTags: options.sourceConditions.tagConditions, }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34764,7 +35848,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34785,7 +35869,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34806,7 +35890,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34857,12 +35941,17 @@ class AppendBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -34917,7 +36006,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34938,20 +36027,19 @@ class AppendBlobClient extends BlobClient { const conditions = { ifNoneMatch: ETagAny }; try { const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -34974,7 +36062,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35013,13 +36101,13 @@ class AppendBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35052,13 +36140,13 @@ class AppendBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35109,12 +36197,17 @@ class BlockBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -35180,23 +36273,25 @@ class BlockBlobClient extends BlobClient { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); try { - if (false) {} + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, inputSerialization: 
toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) + outputSerialization: toQuerySerialization(options.outputTextConfiguration), }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, - onError: options.onError + onError: options.onError, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35238,13 +36333,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35281,13 +36376,13 @@ class BlockBlobClient extends BlobClient { sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35311,13 +36406,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35355,7 +36450,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35386,7 +36481,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35420,7 +36515,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35447,7 +36542,7 @@ class BlockBlobClient extends BlobClient { async uploadData(data, options = {}) { const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); try { - if (true) { + if (coreHttp.isNode) { let buffer; if (data instanceof Buffer) { buffer = data; @@ -35461,12 +36556,15 @@ class BlockBlobClient extends BlobClient { } return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } - else {} + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35502,7 +36600,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35515,7 +36613,7 @@ class BlockBlobClient extends BlobClient { * Uploads data to block blob. Requires a bodyFactory as the data source, * which need to return a {@link HttpRequestBody} object with the offset and size provided. * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. 
@@ -35581,14 +36679,14 @@ class BlockBlobClient extends BlobClient { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying // TODO: Hook with convenience layer progress event in finer level transferProgress += contentLength; if (options.onProgress) { options.onProgress({ - loadedBytes: transferProgress + loadedBytes: transferProgress, }); } }); @@ -35599,7 +36697,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35628,14 +36726,14 @@ class BlockBlobClient extends BlobClient { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, - start: offset + start: offset, }); }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35679,7 +36777,7 @@ class BlockBlobClient extends BlobClient { await this.stageBlock(blockID, body, length, { conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying transferProgress += length; @@ -35698,7 +36796,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35749,12 +36847,17 @@ class PageBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -35804,7 +36907,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35827,20 +36930,19 @@ class PageBlobClient extends BlobClient { try { const conditions = { ifNoneMatch: ETagAny }; const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) 
=== null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35865,13 +36967,13 @@ class PageBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35901,13 +37003,13 @@ class PageBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35934,7 +37036,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35963,7 +37065,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -35993,7 +37095,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36023,7 +37125,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36049,7 +37151,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36076,7 +37178,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36106,7 +37208,7 @@ class 
PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36254,7 +37356,7 @@ class BatchResponseParser { return { subResponses: deserializedSubResponses, subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount + subResponsesFailedCount: subResponsesFailedCount, }; } } @@ -36401,7 +37503,7 @@ class BlobBatch { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); @@ -36409,7 +37511,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36448,7 +37550,7 @@ class BlobBatch { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); @@ -36456,7 +37558,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36513,7 +37615,7 @@ class InnerBatchRequest { this.subRequestPrefix, `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` // sub request start line with method + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); for (const header of request.headers.headersArray()) { this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; @@ -36553,7 +37655,7 @@ class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { this.dummyResponse = { request: new coreHttp.WebResource(), status: 200, - headers: new coreHttp.HttpHeaders() + headers: new coreHttp.HttpHeaders(), }; this.batchRequest = batchRequest; } @@ -36720,14 +37822,14 @@ class BlobBatchClient { version: rawBatchResponse.version, subResponses: responseSummary.subResponses, subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount + subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36773,12 +37875,17 @@ class ContainerClient extends StorageClient { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if 
(extractedCreds.kind === "SASConnString") { url = @@ -36830,7 +37937,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36850,20 +37957,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); try { const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." + message: "Expected exception when creating a container only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36885,7 +37991,7 @@ class ContainerClient extends StorageClient { try { await this.getProperties({ abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } @@ -36893,13 +37999,13 @@ class ContainerClient extends StorageClient { if (e.statusCode === 404) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" + message: "Expected exception when checking container existence", }); return false; } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36973,7 +38079,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -36999,7 +38105,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37019,20 +38125,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." + message: "Expected exception when deleting a container only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37066,7 +38171,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37102,13 +38207,13 @@ class ContainerClient extends StorageClient { requestId: response.requestId, clientRequestId: response.clientRequestId, signedIdentifiers: [], - version: response.version + version: response.version, }; for (const identifier of response) { let accessPolicy = undefined; if (identifier.accessPolicy) { accessPolicy = { - permissions: identifier.accessPolicy.permissions + permissions: identifier.accessPolicy.permissions, }; if (identifier.accessPolicy.expiresOn) { accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); @@ -37119,7 +38224,7 @@ class ContainerClient extends StorageClient { } res.signedIdentifiers.push({ accessPolicy, - id: identifier.id + id: identifier.id, }); } return res; @@ -37127,7 +38232,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37166,9 +38271,9 @@ class ContainerClient extends StorageClient { permissions: identifier.accessPolicy.permissions, startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "" + : "", }, - id: identifier.id + id: identifier.id, }); } return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); @@ -37176,7 +38281,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37222,13 +38327,13 @@ class ContainerClient extends StorageClient { const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, - response + response, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37259,7 +38364,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37281,8 +38386,12 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); try { const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const 
wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; @@ -37290,7 +38399,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37310,19 +38419,33 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. */ async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); try { const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; }) }) }); return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -37504,7 +38627,7 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } + }, }; } /** @@ -37578,7 +38701,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * } * ``` @@ -37593,7 +38716,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * entity = await iter.next(); * } @@ -37611,7 +38734,7 @@ class ContainerClient extends StorageClient { * } * } * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -37622,7 +38745,9 @@ class ContainerClient extends StorageClient { * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * * let i = 1; - * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) { + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { * console.log(`Page ${i++}`); * const segment = response.segment; * @@ -37633,7 +38758,7 @@ class ContainerClient extends StorageClient { * } * * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -37700,7 +38825,208 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, }; } getContainerNameFromUrl() { @@ -37833,6 +39159,10 @@ class AccountSASPermissions { * Permission to set immutability policy. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Parse initializes the AccountSASPermissions fields from a string. @@ -37879,6 +39209,9 @@ class AccountSASPermissions { case "i": accountSASPermissions.setImmutabilityPolicy = true; break; + case "y": + accountSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission character: ${c}`); } @@ -37929,6 +39262,9 @@ class AccountSASPermissions { if (permissionLike.setImmutabilityPolicy) { accountSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } return accountSASPermissions; } /** @@ -37982,6 +39318,9 @@ class AccountSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -38164,6 +39503,11 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -38174,25 +39518,48 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - const stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn - ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) - : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - "" // Account SAS requires an additional newline character - ].join("\n"); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); } /** @@ -38239,12 +39606,17 @@ class BlobServiceClient extends StorageClient { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { const pipeline = newPipeline(new AnonymousCredential(), options); @@ -38283,13 +39655,13 @@ class BlobServiceClient extends StorageClient { const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, - containerCreateResponse + containerCreateResponse, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ 
-38313,7 +39685,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38343,7 +39715,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38373,7 +39745,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38397,7 +39769,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38422,7 +39794,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38447,7 +39819,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38473,7 +39845,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38503,7 +39875,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38546,7 +39918,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38712,7 +40084,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -38850,6 +40222,9 @@ class BlobServiceClient extends StorageClient { if (options.includeMetadata) { include.push("metadata"); } + if (options.includeSystem) { + include.push("system"); + } // AsyncIterableIterator to iterate over containers const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include } : {})); const iter = this.listItems(listSegmentOptions); @@ -38871,7 +40246,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -38890,7 +40265,7 @@ class BlobServiceClient extends StorageClient { try { const response = await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false) + expiresOn: truncatedISO8061Date(expiresOn, false), }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); const userDelegationKey = { signedObjectId: response.signedObjectId, @@ -38899,7 +40274,7 @@ class BlobServiceClient extends StorageClient { signedExpiresOn: new Date(response.signedExpiresOn), signedService: response.signedService, signedVersion: response.signedVersion, - value: response.value + value: response.value, }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; @@ -38907,7 +40282,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -38956,39 +40331,27 @@ class BlobServiceClient extends StorageClient { Object.defineProperty(exports, 'BaseRequestPolicy', { enumerable: true, - get: function () { - return coreHttp.BaseRequestPolicy; - } + get: function () { return coreHttp.BaseRequestPolicy; } }); Object.defineProperty(exports, 'HttpHeaders', { enumerable: true, - get: function () { - return coreHttp.HttpHeaders; - } + get: function () { return coreHttp.HttpHeaders; } }); Object.defineProperty(exports, 'RequestPolicyOptions', { enumerable: true, - get: function () { - return coreHttp.RequestPolicyOptions; - } + get: function () { return coreHttp.RequestPolicyOptions; } }); Object.defineProperty(exports, 'RestError', { enumerable: true, - get: function () { - return coreHttp.RestError; - } + get: function () { return coreHttp.RestError; } }); Object.defineProperty(exports, 'WebResource', { enumerable: true, - get: function () { - return coreHttp.WebResource; - } + get: function () { return coreHttp.WebResource; } }); Object.defineProperty(exports, 'deserializationPolicy', { enumerable: true, - get: function () { - return coreHttp.deserializationPolicy; - } + get: function () { return coreHttp.deserializationPolicy; } }); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; @@ -39214,7 +40577,7 @@ exports.SearchState = SearchState; * POSSIBILITY OF SUCH DAMAGE. */ -const punycode = __webpack_require__(213); +const punycode = __webpack_require__(815); const urlParse = __webpack_require__(835).parse; const util = __webpack_require__(669); const pubsuffix = __webpack_require__(562); @@ -45174,7 +46537,7 @@ module.exports = clean Object.defineProperty(exports, '__esModule', { value: true }); -__webpack_require__(71); +__webpack_require__(97); var tslib = __webpack_require__(671); // Copyright (c) Microsoft Corporation. @@ -45197,7 +46560,7 @@ function getPagedAsyncIterator(pagedResult) { }, byPage: (_a = pagedResult === null || pagedResult === void 0 ? 
void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { return getPageAsyncIterator(pagedResult, settings === null || settings === void 0 ? void 0 : settings.maxPageSize); - }) + }), }; } function getItemAsyncIterator(pagedResult, maxPageSize) { @@ -48652,7 +50015,7 @@ function coerce (version, options) { XMLStringifier = __webpack_require__(602); - XMLStringWriter = __webpack_require__(347); + XMLStringWriter = __webpack_require__(750); module.exports = XMLDocument = (function(superClass) { extend(XMLDocument, superClass); @@ -48944,7 +50307,7 @@ exports.baggageEntryMetadataSymbol = Symbol('BaggageEntryMetadata'); * POSSIBILITY OF SUCH DAMAGE. */ -const psl = __webpack_require__(750); +const psl = __webpack_require__(632); function getPublicSuffix(domain) { return psl.get(domain); @@ -49695,7 +51058,318 @@ Object.defineProperty(exports, "__esModule", { value: true }); //# sourceMappingURL=span_context.js.map /***/ }), -/* 608 */, +/* 608 */ +/***/ (function(module) { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? 
(this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), /* 609 */, /* 610 */, /* 611 */, @@ -50321,7 +51995,282 @@ module.exports = rcompare module.exports = require("net"); /***/ }), -/* 632 */, +/* 632 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; +/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ + + + +var Punycode = __webpack_require__(815); + + +var internals = {}; + + +// +// Read rules from file. +// +internals.rules = __webpack_require__(50).map(function (rule) { + + return { + rule: rule, + suffix: rule.replace(/^(\*\.|\!)/, ''), + punySuffix: -1, + wildcard: rule.charAt(0) === '*', + exception: rule.charAt(0) === '!' + }; +}); + + +// +// Check is given string ends with `suffix`. +// +internals.endsWith = function (str, suffix) { + + return str.indexOf(suffix, str.length - suffix.length) !== -1; +}; + + +// +// Find rule for a given domain. +// +internals.findRule = function (domain) { + + var punyDomain = Punycode.toASCII(domain); + return internals.rules.reduce(function (memo, rule) { + + if (rule.punySuffix === -1){ + rule.punySuffix = Punycode.toASCII(rule.suffix); + } + if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { + return memo; + } + // This has been commented out as it never seems to run. This is because + // sub tlds always appear after their parents and we never find a shorter + // match. + //if (memo) { + // var memoSuffix = Punycode.toASCII(memo.suffix); + // if (memoSuffix.length >= punySuffix.length) { + // return memo; + // } + //} + return rule; + }, null); +}; + + +// +// Error codes and messages. +// +exports.errorCodes = { + DOMAIN_TOO_SHORT: 'Domain name too short.', + DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', + LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', + LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', + LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', + LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', + LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' +}; + + +// +// Validate domain name and throw if not valid. 
+// +// From wikipedia: +// +// Hostnames are composed of series of labels concatenated with dots, as are all +// domain names. Each label must be between 1 and 63 characters long, and the +// entire hostname (including the delimiting dots) has a maximum of 255 chars. +// +// Allowed chars: +// +// * `a-z` +// * `0-9` +// * `-` but not as a starting or ending character +// * `.` as a separator for the textual portions of a domain name +// +// * http://en.wikipedia.org/wiki/Domain_name +// * http://en.wikipedia.org/wiki/Hostname +// +internals.validate = function (input) { + + // Before we can validate we need to take care of IDNs with unicode chars. + var ascii = Punycode.toASCII(input); + + if (ascii.length < 1) { + return 'DOMAIN_TOO_SHORT'; + } + if (ascii.length > 255) { + return 'DOMAIN_TOO_LONG'; + } + + // Check each part's length and allowed chars. + var labels = ascii.split('.'); + var label; + + for (var i = 0; i < labels.length; ++i) { + label = labels[i]; + if (!label.length) { + return 'LABEL_TOO_SHORT'; + } + if (label.length > 63) { + return 'LABEL_TOO_LONG'; + } + if (label.charAt(0) === '-') { + return 'LABEL_STARTS_WITH_DASH'; + } + if (label.charAt(label.length - 1) === '-') { + return 'LABEL_ENDS_WITH_DASH'; + } + if (!/^[a-z0-9\-]+$/.test(label)) { + return 'LABEL_INVALID_CHARS'; + } + } +}; + + +// +// Public API +// + + +// +// Parse domain. +// +exports.parse = function (input) { + + if (typeof input !== 'string') { + throw new TypeError('Domain name must be a string.'); + } + + // Force domain to lowercase. + var domain = input.slice(0).toLowerCase(); + + // Handle FQDN. + // TODO: Simply remove trailing dot? + if (domain.charAt(domain.length - 1) === '.') { + domain = domain.slice(0, domain.length - 1); + } + + // Validate and sanitise input. + var error = internals.validate(domain); + if (error) { + return { + input: input, + error: { + message: exports.errorCodes[error], + code: error + } + }; + } + + var parsed = { + input: input, + tld: null, + sld: null, + domain: null, + subdomain: null, + listed: false + }; + + var domainParts = domain.split('.'); + + // Non-Internet TLD + if (domainParts[domainParts.length - 1] === 'local') { + return parsed; + } + + var handlePunycode = function () { + + if (!/xn--/.test(domain)) { + return parsed; + } + if (parsed.domain) { + parsed.domain = Punycode.toASCII(parsed.domain); + } + if (parsed.subdomain) { + parsed.subdomain = Punycode.toASCII(parsed.subdomain); + } + return parsed; + }; + + var rule = internals.findRule(domain); + + // Unlisted tld. + if (!rule) { + if (domainParts.length < 2) { + return parsed; + } + parsed.tld = domainParts.pop(); + parsed.sld = domainParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + if (domainParts.length) { + parsed.subdomain = domainParts.pop(); + } + return handlePunycode(); + } + + // At this point we know the public suffix is listed. 
+ parsed.listed = true; + + var tldParts = rule.suffix.split('.'); + var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); + + if (rule.exception) { + privateParts.push(tldParts.shift()); + } + + parsed.tld = tldParts.join('.'); + + if (!privateParts.length) { + return handlePunycode(); + } + + if (rule.wildcard) { + tldParts.unshift(privateParts.pop()); + parsed.tld = tldParts.join('.'); + } + + if (!privateParts.length) { + return handlePunycode(); + } + + parsed.sld = privateParts.pop(); + parsed.domain = [parsed.sld, parsed.tld].join('.'); + + if (privateParts.length) { + parsed.subdomain = privateParts.join('.'); + } + + return handlePunycode(); +}; + + +// +// Get domain. +// +exports.get = function (domain) { + + if (!domain) { + return null; + } + return exports.parse(domain).domain || null; +}; + + +// +// Check whether domain belongs to a known public suffix. +// +exports.isValid = function (domain) { + + var parsed = exports.parse(domain); + return Boolean(parsed.domain && parsed.listed); +}; + + +/***/ }), /* 633 */, /* 634 */, /* 635 */, @@ -53626,18 +55575,12 @@ exports.restoreCache = restoreCache; * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options) { + var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); - core.debug('Reserving Cache'); - const cacheId = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod - }); - if (cacheId === -1) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core.debug(`Cache ID: ${cacheId}`); + let cacheId = null; const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); @@ -53652,9 +55595,24 @@ function saveCache(paths, key, options) { const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit) { + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? 
_d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } core.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, options); } @@ -54759,278 +56717,43 @@ module.exports = require("fs"); /* 748 */, /* 749 */, /* 750 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; -/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */ +// Generated by CoffeeScript 1.12.7 +(function() { + var XMLStringWriter, XMLWriterBase, + extend = function(child, parent) { for (var key in parent) { if (hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; }, + hasProp = {}.hasOwnProperty; + XMLWriterBase = __webpack_require__(423); + module.exports = XMLStringWriter = (function(superClass) { + extend(XMLStringWriter, superClass); -var Punycode = __webpack_require__(213); - - -var internals = {}; - - -// -// Read rules from file. -// -internals.rules = __webpack_require__(50).map(function (rule) { - - return { - rule: rule, - suffix: rule.replace(/^(\*\.|\!)/, ''), - punySuffix: -1, - wildcard: rule.charAt(0) === '*', - exception: rule.charAt(0) === '!' - }; -}); - - -// -// Check is given string ends with `suffix`. -// -internals.endsWith = function (str, suffix) { - - return str.indexOf(suffix, str.length - suffix.length) !== -1; -}; - - -// -// Find rule for a given domain. -// -internals.findRule = function (domain) { - - var punyDomain = Punycode.toASCII(domain); - return internals.rules.reduce(function (memo, rule) { - - if (rule.punySuffix === -1){ - rule.punySuffix = Punycode.toASCII(rule.suffix); + function XMLStringWriter(options) { + XMLStringWriter.__super__.constructor.call(this, options); } - if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) { - return memo; - } - // This has been commented out as it never seems to run. This is because - // sub tlds always appear after their parents and we never find a shorter - // match. - //if (memo) { - // var memoSuffix = Punycode.toASCII(memo.suffix); - // if (memoSuffix.length >= punySuffix.length) { - // return memo; - // } - //} - return rule; - }, null); -}; - -// -// Error codes and messages. -// -exports.errorCodes = { - DOMAIN_TOO_SHORT: 'Domain name too short.', - DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.', - LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.', - LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.', - LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.', - LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.', - LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.' -}; - - -// -// Validate domain name and throw if not valid. -// -// From wikipedia: -// -// Hostnames are composed of series of labels concatenated with dots, as are all -// domain names. 
Each label must be between 1 and 63 characters long, and the -// entire hostname (including the delimiting dots) has a maximum of 255 chars. -// -// Allowed chars: -// -// * `a-z` -// * `0-9` -// * `-` but not as a starting or ending character -// * `.` as a separator for the textual portions of a domain name -// -// * http://en.wikipedia.org/wiki/Domain_name -// * http://en.wikipedia.org/wiki/Hostname -// -internals.validate = function (input) { - - // Before we can validate we need to take care of IDNs with unicode chars. - var ascii = Punycode.toASCII(input); - - if (ascii.length < 1) { - return 'DOMAIN_TOO_SHORT'; - } - if (ascii.length > 255) { - return 'DOMAIN_TOO_LONG'; - } - - // Check each part's length and allowed chars. - var labels = ascii.split('.'); - var label; - - for (var i = 0; i < labels.length; ++i) { - label = labels[i]; - if (!label.length) { - return 'LABEL_TOO_SHORT'; - } - if (label.length > 63) { - return 'LABEL_TOO_LONG'; - } - if (label.charAt(0) === '-') { - return 'LABEL_STARTS_WITH_DASH'; - } - if (label.charAt(label.length - 1) === '-') { - return 'LABEL_ENDS_WITH_DASH'; - } - if (!/^[a-z0-9\-]+$/.test(label)) { - return 'LABEL_INVALID_CHARS'; - } - } -}; - - -// -// Public API -// - - -// -// Parse domain. -// -exports.parse = function (input) { - - if (typeof input !== 'string') { - throw new TypeError('Domain name must be a string.'); - } - - // Force domain to lowercase. - var domain = input.slice(0).toLowerCase(); - - // Handle FQDN. - // TODO: Simply remove trailing dot? - if (domain.charAt(domain.length - 1) === '.') { - domain = domain.slice(0, domain.length - 1); - } - - // Validate and sanitise input. - var error = internals.validate(domain); - if (error) { - return { - input: input, - error: { - message: exports.errorCodes[error], - code: error + XMLStringWriter.prototype.document = function(doc, options) { + var child, i, len, r, ref; + options = this.filterOptions(options); + r = ''; + ref = doc.children; + for (i = 0, len = ref.length; i < len; i++) { + child = ref[i]; + r += this.writeChildNode(child, options, 0); } + if (options.pretty && r.slice(-options.newline.length) === options.newline) { + r = r.slice(0, -options.newline.length); + } + return r; }; - } - var parsed = { - input: input, - tld: null, - sld: null, - domain: null, - subdomain: null, - listed: false - }; + return XMLStringWriter; - var domainParts = domain.split('.'); + })(XMLWriterBase); - // Non-Internet TLD - if (domainParts[domainParts.length - 1] === 'local') { - return parsed; - } - - var handlePunycode = function () { - - if (!/xn--/.test(domain)) { - return parsed; - } - if (parsed.domain) { - parsed.domain = Punycode.toASCII(parsed.domain); - } - if (parsed.subdomain) { - parsed.subdomain = Punycode.toASCII(parsed.subdomain); - } - return parsed; - }; - - var rule = internals.findRule(domain); - - // Unlisted tld. - if (!rule) { - if (domainParts.length < 2) { - return parsed; - } - parsed.tld = domainParts.pop(); - parsed.sld = domainParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - if (domainParts.length) { - parsed.subdomain = domainParts.pop(); - } - return handlePunycode(); - } - - // At this point we know the public suffix is listed. 
- parsed.listed = true; - - var tldParts = rule.suffix.split('.'); - var privateParts = domainParts.slice(0, domainParts.length - tldParts.length); - - if (rule.exception) { - privateParts.push(tldParts.shift()); - } - - parsed.tld = tldParts.join('.'); - - if (!privateParts.length) { - return handlePunycode(); - } - - if (rule.wildcard) { - tldParts.unshift(privateParts.pop()); - parsed.tld = tldParts.join('.'); - } - - if (!privateParts.length) { - return handlePunycode(); - } - - parsed.sld = privateParts.pop(); - parsed.domain = [parsed.sld, parsed.tld].join('.'); - - if (privateParts.length) { - parsed.subdomain = privateParts.join('.'); - } - - return handlePunycode(); -}; - - -// -// Get domain. -// -exports.get = function (domain) { - - if (!domain) { - return null; - } - return exports.parse(domain).domain || null; -}; - - -// -// Check whether domain belongs to a known public suffix. -// -exports.isValid = function (domain) { - - var parsed = exports.parse(domain); - return Boolean(parsed.domain && parsed.listed); -}; +}).call(this); /***/ }), @@ -55267,7 +56990,7 @@ module.exports = function(dst, src) { XMLStringifier = __webpack_require__(602); - XMLStringWriter = __webpack_require__(347); + XMLStringWriter = __webpack_require__(750); WriterState = __webpack_require__(541); @@ -57216,313 +58939,7 @@ module.exports = maxSatisfying /* 815 */ /***/ (function(module) { -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, System, Reflect, Promise */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __createBinding; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? 
previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); -}); - +module.exports = require("punycode"); /***/ }), /* 816 */, @@ -60305,7 +61722,7 @@ exports.deleteKey = deleteKey; processors = __webpack_require__(350); - setImmediate = __webpack_require__(343).setImmediate; + setImmediate = __webpack_require__(213).setImmediate; defaults = __webpack_require__(791).defaults; @@ -60854,7 +62271,6 @@ class Poller { }); } /** - * @internal * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ @@ -60868,7 +62284,6 @@ class Poller { } } /** - * @internal * pollOnce does one polling, by calling to the update method of the underlying * poll operation to make any relevant change effective. * @@ -60881,7 +62296,7 @@ class Poller { if (!this.isDone()) { this.operation = await this.operation.update({ abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) + fireProgress: this.fireProgress.bind(this), }); if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. @@ -60902,7 +62317,6 @@ class Poller { } } /** - * @internal * fireProgress calls the functions passed in via onProgress the method of the poller. * * It loops over all of the callbacks received from onProgress, and executes them, sending them @@ -60916,7 +62330,6 @@ class Poller { } } /** - * @internal * Invokes the underlying operation's cancel method, and rejects the * pollUntilDone promise. */ @@ -61080,13 +62493,6 @@ class Poller { } } -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -61097,7 +62503,7 @@ const logger = logger$1.createClientLogger("core-lro"); */ function getPollingUrl(rawResponse, defaultPath) { var _a, _b, _c; - return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getLocation(rawResponse)) !== null && _b !== void 0 ? _b : getOperationLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? 
_b : getLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); } function getLocation(rawResponse) { return rawResponse.headers["location"]; @@ -61108,26 +62514,36 @@ function getOperationLocation(rawResponse) { function getAzureAsyncOperation(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } -function inferLroMode(requestPath, requestMethod, rawResponse) { - if (getAzureAsyncOperation(rawResponse) !== undefined) { - return { - mode: "AzureAsync", - resourceLocation: requestMethod === "PUT" - ? requestPath - : requestMethod === "POST" - ? getLocation(rawResponse) - : undefined - }; +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } } - else if (getLocation(rawResponse) !== undefined || +} +function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || getOperationLocation(rawResponse) !== undefined) { return { - mode: "Location" + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", }; } else if (["PUT", "PATCH"].includes(requestMethod)) { return { - mode: "Body" + mode: "Body", }; } return {}; @@ -61160,50 +62576,6 @@ function isUnexpectedPollingResponse(rawResponse) { const successStates = ["succeeded"]; const failureStates = ["failed", "canceled", "cancelled"]; -// Copyright (c) Microsoft Corporation. -function getResponseStatus(rawResponse) { - var _a; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return typeof status === "string" ? status.toLowerCase() : "succeeded"; -} -function isAzureAsyncPollingDone(rawResponse) { - const state = getResponseStatus(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Sends a request to the URI of the provisioned resource if needed. - */ -async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { - switch (lroResourceLocationConfig) { - case "original-uri": - return lro.sendPollRequest(lro.requestPath); - case "azure-async-operation": - return undefined; - case "location": - default: - return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); - } -} -function processAzureAsyncOperationResult(lro, resourceLocation, lroResourceLocationConfig) { - return (response) => { - if (isAzureAsyncPollingDone(response.rawResponse)) { - if (resourceLocation === undefined) { - return Object.assign(Object.assign({}, response), { done: true }); - } - else { - return Object.assign(Object.assign({}, response), { done: false, next: async () => { - const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); - return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); - } }); - } - } - return Object.assign(Object.assign({}, response), { done: false }); - }; -} - // Copyright (c) Microsoft Corporation. 
function getProvisioningState(rawResponse) { var _a, _b; @@ -61227,11 +62599,54 @@ function processBodyPollingOperationResult(response) { } // Copyright (c) Microsoft Corporation. -function isLocationPollingDone(rawResponse) { - return !isUnexpectedPollingResponse(rawResponse) && rawResponse.statusCode !== 202; +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. +function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); } -function processLocationPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isLocationPollingDone(response.rawResponse) }); +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; } // Copyright (c) Microsoft Corporation. @@ -61246,11 +62661,8 @@ function processPassthroughOperationResult(response) { */ function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { switch (config.mode) { - case "AzureAsync": { - return processAzureAsyncOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); - } case "Location": { - return processLocationPollingOperationResult; + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); } case "Body": { return processBodyPollingOperationResult; @@ -61268,10 +62680,11 @@ function createPoll(lroPrimitives) { const response = await lroPrimitives.sendPollRequest(path); const retryAfter = response.rawResponse.headers["retry-after"]; if (retryAfter !== undefined) { - const retryAfterInMs = parseInt(retryAfter); - pollerConfig.intervalInMs = isNaN(retryAfterInMs) + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) - : retryAfterInMs; + : retryAfterInSeconds * 1000; } return getLroStatusFromResponse(response); }; @@ -61394,7 +62807,7 @@ class GenericPollOperation { */ toString() { return JSON.stringify({ - state: this.state + state: this.state, }); } } @@ -61694,7 +63107,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry return { statusCode: error.statusCode, result: null, - headers: {} + headers: {}, + error }; } else { @@ -62464,7 +63878,7 @@ var SpanStatusCode; "use strict"; -const punycode = __webpack_require__(213); +const punycode = __webpack_require__(815); const tr46 = __webpack_require__(361); const specialSchemes = { diff --git a/dist/setup/index.js b/dist/setup/index.js index a36e04b..0cd24a4 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -724,6 +724,11 @@ function assertDefined(name, value) { return value; } exports.assertDefined = assertDefined; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; //# sourceMappingURL=cacheUtils.js.map /***/ }), @@ -3525,7 +3530,19 @@ exports.StringWalker = StringWalker; //# sourceMappingURL=StringWalker.js.map /***/ }), -/* 48 */, +/* 48 */ +/***/ (function() { + +"use strict"; + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +if (typeof Symbol === undefined || !Symbol.asyncIterator) { + Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); +} +//# sourceMappingURL=index.js.map + +/***/ }), /* 49 */, /* 50 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -4192,17 +4209,7 @@ exports.parseURL = __webpack_require__(936).parseURL; /***/ }), -/* 71 */ -/***/ (function() { - -"use strict"; - -if (typeof Symbol === undefined || !Symbol.asyncIterator) { - Symbol.asyncIterator = Symbol.for("Symbol.asyncIterator"); -} -//# sourceMappingURL=index.js.map - -/***/ }), +/* 71 */, /* 72 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -8754,7 +8761,6 @@ class Poller { }); } /** - * @internal * Starts a loop that will break only if the poller is done * or if the poller is stopped. */ @@ -8768,7 +8774,6 @@ class Poller { } } /** - * @internal * pollOnce does one polling, by calling to the update method of the underlying * poll operation to make any relevant change effective. * @@ -8781,7 +8786,7 @@ class Poller { if (!this.isDone()) { this.operation = await this.operation.update({ abortSignal: options.abortSignal, - fireProgress: this.fireProgress.bind(this) + fireProgress: this.fireProgress.bind(this), }); if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. @@ -8802,7 +8807,6 @@ class Poller { } } /** - * @internal * fireProgress calls the functions passed in via onProgress the method of the poller. * * It loops over all of the callbacks received from onProgress, and executes them, sending them @@ -8816,7 +8820,6 @@ class Poller { } } /** - * @internal * Invokes the underlying operation's cancel method, and rejects the * pollUntilDone promise. */ @@ -8980,13 +8983,6 @@ class Poller { } } -// Copyright (c) Microsoft Corporation. -/** - * The `@azure/logger` configuration for this package. - * @internal - */ -const logger = logger$1.createClientLogger("core-lro"); - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
/** @@ -8997,7 +8993,7 @@ const logger = logger$1.createClientLogger("core-lro"); */ function getPollingUrl(rawResponse, defaultPath) { var _a, _b, _c; - return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getLocation(rawResponse)) !== null && _b !== void 0 ? _b : getOperationLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); + return ((_c = (_b = (_a = getAzureAsyncOperation(rawResponse)) !== null && _a !== void 0 ? _a : getOperationLocation(rawResponse)) !== null && _b !== void 0 ? _b : getLocation(rawResponse)) !== null && _c !== void 0 ? _c : defaultPath); } function getLocation(rawResponse) { return rawResponse.headers["location"]; @@ -9008,26 +9004,36 @@ function getOperationLocation(rawResponse) { function getAzureAsyncOperation(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } -function inferLroMode(requestPath, requestMethod, rawResponse) { - if (getAzureAsyncOperation(rawResponse) !== undefined) { - return { - mode: "AzureAsync", - resourceLocation: requestMethod === "PUT" - ? requestPath - : requestMethod === "POST" - ? getLocation(rawResponse) - : undefined - }; +function findResourceLocation(requestMethod, rawResponse, requestPath) { + switch (requestMethod) { + case "PUT": { + return requestPath; + } + case "POST": + case "PATCH": { + return getLocation(rawResponse); + } + default: { + return undefined; + } } - else if (getLocation(rawResponse) !== undefined || +} +function inferLroMode(requestPath, requestMethod, rawResponse) { + if (getAzureAsyncOperation(rawResponse) !== undefined || getOperationLocation(rawResponse) !== undefined) { return { - mode: "Location" + mode: "Location", + resourceLocation: findResourceLocation(requestMethod, rawResponse, requestPath), + }; + } + else if (getLocation(rawResponse) !== undefined) { + return { + mode: "Location", }; } else if (["PUT", "PATCH"].includes(requestMethod)) { return { - mode: "Body" + mode: "Body", }; } return {}; @@ -9060,50 +9066,6 @@ function isUnexpectedPollingResponse(rawResponse) { const successStates = ["succeeded"]; const failureStates = ["failed", "canceled", "cancelled"]; -// Copyright (c) Microsoft Corporation. -function getResponseStatus(rawResponse) { - var _a; - const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; - return typeof status === "string" ? status.toLowerCase() : "succeeded"; -} -function isAzureAsyncPollingDone(rawResponse) { - const state = getResponseStatus(rawResponse); - if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { - throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); - } - return successStates.includes(state); -} -/** - * Sends a request to the URI of the provisioned resource if needed. - */ -async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { - switch (lroResourceLocationConfig) { - case "original-uri": - return lro.sendPollRequest(lro.requestPath); - case "azure-async-operation": - return undefined; - case "location": - default: - return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? 
resourceLocation : lro.requestPath); - } -} -function processAzureAsyncOperationResult(lro, resourceLocation, lroResourceLocationConfig) { - return (response) => { - if (isAzureAsyncPollingDone(response.rawResponse)) { - if (resourceLocation === undefined) { - return Object.assign(Object.assign({}, response), { done: true }); - } - else { - return Object.assign(Object.assign({}, response), { done: false, next: async () => { - const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); - return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); - } }); - } - } - return Object.assign(Object.assign({}, response), { done: false }); - }; -} - // Copyright (c) Microsoft Corporation. function getProvisioningState(rawResponse) { var _a, _b; @@ -9127,11 +9089,54 @@ function processBodyPollingOperationResult(response) { } // Copyright (c) Microsoft Corporation. -function isLocationPollingDone(rawResponse) { - return !isUnexpectedPollingResponse(rawResponse) && rawResponse.statusCode !== 202; +/** + * The `@azure/logger` configuration for this package. + * @internal + */ +const logger = logger$1.createClientLogger("core-lro"); + +// Copyright (c) Microsoft Corporation. +function isPollingDone(rawResponse) { + var _a; + if (isUnexpectedPollingResponse(rawResponse) || rawResponse.statusCode === 202) { + return false; + } + const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; + const state = typeof status === "string" ? status.toLowerCase() : "succeeded"; + if (isUnexpectedPollingResponse(rawResponse) || failureStates.includes(state)) { + throw new Error(`The long running operation has failed. The provisioning state: ${state}.`); + } + return successStates.includes(state); } -function processLocationPollingOperationResult(response) { - return Object.assign(Object.assign({}, response), { done: isLocationPollingDone(response.rawResponse) }); +/** + * Sends a request to the URI of the provisioned resource if needed. + */ +async function sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig) { + switch (lroResourceLocationConfig) { + case "original-uri": + return lro.sendPollRequest(lro.requestPath); + case "azure-async-operation": + return undefined; + case "location": + default: + return lro.sendPollRequest(resourceLocation !== null && resourceLocation !== void 0 ? resourceLocation : lro.requestPath); + } +} +function processLocationPollingOperationResult(lro, resourceLocation, lroResourceLocationConfig) { + return (response) => { + if (isPollingDone(response.rawResponse)) { + if (resourceLocation === undefined) { + return Object.assign(Object.assign({}, response), { done: true }); + } + else { + return Object.assign(Object.assign({}, response), { done: false, next: async () => { + const finalResponse = await sendFinalRequest(lro, resourceLocation, lroResourceLocationConfig); + return Object.assign(Object.assign({}, (finalResponse !== null && finalResponse !== void 0 ? finalResponse : response)), { done: true }); + } }); + } + } + return Object.assign(Object.assign({}, response), { done: false }); + }; } // Copyright (c) Microsoft Corporation. 
@@ -9146,11 +9151,8 @@ function processPassthroughOperationResult(response) { */ function createGetLroStatusFromResponse(lroPrimitives, config, lroResourceLocationConfig) { switch (config.mode) { - case "AzureAsync": { - return processAzureAsyncOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); - } case "Location": { - return processLocationPollingOperationResult; + return processLocationPollingOperationResult(lroPrimitives, config.resourceLocation, lroResourceLocationConfig); } case "Body": { return processBodyPollingOperationResult; @@ -9168,10 +9170,11 @@ function createPoll(lroPrimitives) { const response = await lroPrimitives.sendPollRequest(path); const retryAfter = response.rawResponse.headers["retry-after"]; if (retryAfter !== undefined) { - const retryAfterInMs = parseInt(retryAfter); - pollerConfig.intervalInMs = isNaN(retryAfterInMs) + // Retry-After header value is either in HTTP date format, or in seconds + const retryAfterInSeconds = parseInt(retryAfter); + pollerConfig.intervalInMs = isNaN(retryAfterInSeconds) ? calculatePollingIntervalFromDate(new Date(retryAfter), pollerConfig.intervalInMs) - : retryAfterInMs; + : retryAfterInSeconds * 1000; } return getLroStatusFromResponse(response); }; @@ -9294,7 +9297,7 @@ class GenericPollOperation { */ toString() { return JSON.stringify({ - state: this.state + state: this.state, }); } } @@ -9604,18 +9607,18 @@ function downloadCache(archiveLocation, archivePath, options) { exports.downloadCache = downloadCache; // Reserve Cache function reserveCache(key, paths, options) { - var _a, _b; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod); const reserveCacheRequest = { key, - version + version, + cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; const response = yield requestUtils_1.retryTypedResponse('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); - return (_b = (_a = response === null || response === void 0 ? void 0 : response.result) === null || _a === void 0 ? void 0 : _a.cacheId) !== null && _b !== void 0 ? 
_b : -1; + return response; }); } exports.reserveCache = reserveCache; @@ -9767,14 +9770,15 @@ var DiagAPI = /** @class */ (function () { function DiagAPI() { function _logProxy(funcName) { return function () { + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } var logger = global_utils_1.getGlobal('diag'); // shortcut if logger not set if (!logger) return; - return logger[funcName].apply(logger, - // work around Function.prototype.apply types - // eslint-disable-next-line @typescript-eslint/no-explicit-any - arguments); + return logger[funcName].apply(logger, args); }; } // Using self local variable for minification purposes as 'this' cannot be minified @@ -10486,7 +10490,7 @@ exports.ROOT_CONTEXT = new BaseContext(); Object.defineProperty(exports, "__esModule", { value: true }); exports.VERSION = void 0; // this is autogenerated file, see scripts/version-update.js -exports.VERSION = '1.0.3'; +exports.VERSION = '1.0.4'; //# sourceMappingURL=version.js.map /***/ }), @@ -14262,7 +14266,7 @@ exports.DOMParser = DOMParserImpl_1.DOMParserImpl; /* 213 */ /***/ (function(module) { -module.exports = require("punycode"); +module.exports = require("timers"); /***/ }), /* 214 */, @@ -14811,17 +14815,22 @@ var DiagConsoleLogger = /** @class */ (function () { function DiagConsoleLogger() { function _consoleFunc(funcName) { return function () { - var orgArguments = arguments; + var args = []; + for (var _i = 0; _i < arguments.length; _i++) { + args[_i] = arguments[_i]; + } if (console) { // Some environments only expose the console when the F12 developer console is open + // eslint-disable-next-line no-console var theFunc = console[funcName]; if (typeof theFunc !== 'function') { // Not all environments support all functions + // eslint-disable-next-line no-console theFunc = console.log; } // One last final check if (typeof theFunc === 'function') { - return theFunc.apply(console, orgArguments); + return theFunc.apply(console, args); } } }; @@ -19119,26 +19128,53 @@ var MatchKind; Object.defineProperty(exports, '__esModule', { value: true }); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - var uuid = __webpack_require__(585); -var tough = __webpack_require__(929); +var util = __webpack_require__(669); +var tslib = __webpack_require__(909); +var xml2js = __webpack_require__(992); +var abortController = __webpack_require__(819); +var logger$1 = __webpack_require__(492); +var coreAuth = __webpack_require__(229); +var os = __webpack_require__(87); var http = __webpack_require__(605); var https = __webpack_require__(34); -var node_fetch = _interopDefault(__webpack_require__(454)); -var abortController = __webpack_require__(819); -var FormData = _interopDefault(__webpack_require__(790)); -var util = __webpack_require__(669); -var url = __webpack_require__(835); -var stream = __webpack_require__(794); -var logger$1 = __webpack_require__(492); +var tough = __webpack_require__(929); var tunnel = __webpack_require__(856); -var tslib = __webpack_require__(909); -var coreAuth = __webpack_require__(229); -var xml2js = __webpack_require__(992); -var os = __webpack_require__(87); +var stream = __webpack_require__(794); +var FormData = __webpack_require__(790); +var node_fetch = __webpack_require__(454); var coreTracing = __webpack_require__(72); -__webpack_require__(71); +var url = __webpack_require__(835); +__webpack_require__(48); + +function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; } + +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var xml2js__namespace = /*#__PURE__*/_interopNamespace(xml2js); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var http__namespace = /*#__PURE__*/_interopNamespace(http); +var https__namespace = /*#__PURE__*/_interopNamespace(https); +var tough__namespace = /*#__PURE__*/_interopNamespace(tough); +var tunnel__namespace = /*#__PURE__*/_interopNamespace(tunnel); +var FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData); +var node_fetch__default = /*#__PURE__*/_interopDefaultLegacy(node_fetch); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. @@ -19187,7 +19223,7 @@ class HttpHeaders { set(headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, - value: headerValue.toString() + value: headerValue.toString(), }; } /** @@ -19219,12 +19255,7 @@ class HttpHeaders { * Get the headers that are contained this collection as an object. */ rawHeaders() { - const result = {}; - for (const headerKey in this._headersMap) { - const header = this._headersMap[headerKey]; - result[header.name.toLowerCase()] = header.value; - } - return result; + return this.toJson({ preserveCase: true }); } /** * Get the headers that are contained in this collection as an array. @@ -19261,14 +19292,27 @@ class HttpHeaders { /** * Get the JSON object representation of this HTTP header collection. 
*/ - toJson() { - return this.rawHeaders(); + toJson(options = {}) { + const result = {}; + if (options.preserveCase) { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[header.name] = header.value; + } + } + else { + for (const headerKey in this._headersMap) { + const header = this._headersMap[headerKey]; + result[getHeaderKey(header.name)] = header.value; + } + } + return result; } /** * Get the string representation of this HTTP header collection. */ toString() { - return JSON.stringify(this.toJson()); + return JSON.stringify(this.toJson({ preserveCase: true })); } /** * Create a deep clone/copy of this HttpHeaders collection. @@ -19312,11 +19356,14 @@ function decodeString(value) { // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. +/** + * A set of constants used internally when processing requests. + */ const Constants = { /** * The core-http version */ - coreHttpVersion: "2.2.2", + coreHttpVersion: "2.2.4", /** * Specifies HTTP. */ @@ -19352,12 +19399,12 @@ const Constants = { POST: "POST", MERGE: "MERGE", HEAD: "HEAD", - PATCH: "PATCH" + PATCH: "PATCH", }, StatusCodes: { TooManyRequests: 429, - ServiceUnavailable: 503 - } + ServiceUnavailable: 503, + }, }, /** * Defines constants for use with HTTP headers. @@ -19377,8 +19424,8 @@ const Constants = { /** * The UserAgent header. */ - USER_AGENT: "User-Agent" - } + USER_AGENT: "User-Agent", + }, }; // Copyright (c) Microsoft Corporation. @@ -19593,18 +19640,38 @@ function isObject(input) { } // Copyright (c) Microsoft Corporation. +// This file contains utility code to serialize and deserialize network operations according to `OperationSpec` objects generated by AutoRest.TypeScript from OpenAPI specifications. +/** + * Used to map raw response objects to final shapes. + * Helps packing and unpacking Dates and other encoded types that are not intrinsic to JSON. + * Also allows pulling values from headers, as well as inserting default values and constants. + */ class Serializer { - constructor(modelMappers = {}, isXML) { + constructor( + /** + * The provided model mapper. + */ + modelMappers = {}, + /** + * Whether the contents are XML or not. + */ + isXML) { this.modelMappers = modelMappers; this.isXML = isXML; } + /** + * Validates constraints, if any. This function will throw if the provided value does not respect those constraints. + * @param mapper - The definition of data models. + * @param value - The value. + * @param objectName - Name of the object. Used in the error messages. 
+ */ validateConstraints(mapper, value, objectName) { const failValidation = (constraintName, constraintValue) => { throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); }; if (mapper.constraints && value != undefined) { const valueAsNumber = value; - const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems } = mapper.constraints; + const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; if (ExclusiveMaximum != undefined && valueAsNumber >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } @@ -19646,20 +19713,20 @@ class Serializer { } } /** - * Serialize the given object based on its metadata defined in the mapper + * Serialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param object - A valid Javascript object to be serialized - * @param objectName - Name of the serialized object - * @param options - additional options to deserialization - * @returns A valid serialized Javascript object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param object - A valid Javascript object to be serialized. + * @param objectName - Name of the serialized object. + * @param options - additional options to deserialization. + * @returns A valid serialized Javascript object. */ serialize(mapper, object, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; let payload = {}; const mapperType = mapper.type.name; @@ -19729,20 +19796,20 @@ class Serializer { return payload; } /** - * Deserialize the given object based on its metadata defined in the mapper + * Deserialize the given object based on its metadata defined in the mapper. * - * @param mapper - The mapper which defines the metadata of the serializable object - * @param responseBody - A valid Javascript entity to be deserialized - * @param objectName - Name of the deserialized object + * @param mapper - The mapper which defines the metadata of the serializable object. + * @param responseBody - A valid Javascript entity to be deserialized. + * @param objectName - Name of the deserialized object. * @param options - Controls behavior of XML parser and builder. - * @returns A valid deserialized Javascript object + * @returns A valid deserialized Javascript object. */ deserialize(mapper, responseBody, objectName, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? 
_c : XML_CHARKEY, }; if (responseBody == undefined) { if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { @@ -19841,9 +19908,7 @@ function bufferToBase64Url(buffer) { // Uint8Array to Base64. const str = encodeByteArray(buffer); // Base64 to Base64Url. - return trimEnd(str, "=") - .replace(/\+/g, "-") - .replace(/\//g, "_"); + return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } function base64UrlToByteArray(str) { if (!str) { @@ -20059,10 +20124,10 @@ function serializeDictionaryType(serializer, mapper, object, objectName, isXml, return tempDictionary; } /** - * Resolves the additionalProperties property from a referenced mapper - * @param serializer - The serializer containing the entire set of mappers - * @param mapper - The composite mapper to resolve - * @param objectName - Name of the object being serialized + * Resolves the additionalProperties property from a referenced mapper. + * @param serializer - The serializer containing the entire set of mappers. + * @param mapper - The composite mapper to resolve. + * @param objectName - Name of the object being serialized. */ function resolveAdditionalProperties(serializer, mapper, objectName) { const additionalProperties = mapper.type.additionalProperties; @@ -20073,7 +20138,7 @@ function resolveAdditionalProperties(serializer, mapper, objectName) { return additionalProperties; } /** - * Finds the mapper referenced by className + * Finds the mapper referenced by `className`. * @param serializer - The serializer containing the entire set of mappers * @param mapper - The composite mapper to resolve * @param objectName - Name of the object being serialized @@ -20412,7 +20477,9 @@ function getPolymorphicDiscriminatorSafely(serializer, typeName) { serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator); } -// TODO: why is this here? +/** + * Utility function that serializes an object that might contain binary information into a plain object, array or a string. + */ function serializeObject(toSerialize) { const castToSerialize = toSerialize; if (toSerialize == undefined) @@ -20450,6 +20517,9 @@ function strEnum(o) { } return result; } +/** + * String enum containing the string types of property mappers. + */ // eslint-disable-next-line @typescript-eslint/no-redeclare const MapperType = strEnum([ "Base64Url", @@ -20467,7 +20537,7 @@ const MapperType = strEnum([ "String", "Stream", "TimeSpan", - "UnixTime" + "UnixTime", ]); // Copyright (c) Microsoft Corporation. @@ -20730,9 +20800,6 @@ class WebResource { } } -// Copyright (c) Microsoft Corporation. -const custom = util.inspect.custom; - // Copyright (c) Microsoft Corporation. /** * A class that handles the query portion of a URLBuilder. @@ -21030,6 +21097,10 @@ class URLBuilder { } } } + /** + * Serializes the URL as a string. + * @returns the URL as a string. + */ toString() { let result = ""; if (this._scheme) { @@ -21065,6 +21136,9 @@ class URLBuilder { this.setQuery(replaceAll(this.getQuery(), searchValue, replaceValue)); } } + /** + * Parses a given string URL into a new {@link URLBuilder}. + */ static parse(text) { const result = new URLBuilder(); result.set(text, "SCHEME_OR_HOST"); @@ -21321,6 +21395,60 @@ function nextQuery(tokenizer) { tokenizer._currentState = "DONE"; } +// Copyright (c) Microsoft Corporation. 
+function createProxyAgent(requestUrl, proxySettings, headers) { + const host = URLBuilder.parse(proxySettings.host).getHost(); + if (!host) { + throw new Error("Expecting a non-empty host in proxy settings."); + } + if (!isValidPort(proxySettings.port)) { + throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); + } + const tunnelOptions = { + proxy: { + host: host, + port: proxySettings.port, + headers: (headers && headers.rawHeaders()) || {}, + }, + }; + if (proxySettings.username && proxySettings.password) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; + } + else if (proxySettings.username) { + tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; + } + const isRequestHttps = isUrlHttps(requestUrl); + const isProxyHttps = isUrlHttps(proxySettings.host); + const proxyAgent = { + isHttps: isRequestHttps, + agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions), + }; + return proxyAgent; +} +function isUrlHttps(url) { + const urlScheme = URLBuilder.parse(url).getScheme() || ""; + return urlScheme.toLowerCase() === "https"; +} +function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { + if (isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpsOverHttps(tunnelOptions); + } + else if (isRequestHttps && !isProxyHttps) { + return tunnel__namespace.httpsOverHttp(tunnelOptions); + } + else if (!isRequestHttps && isProxyHttps) { + return tunnel__namespace.httpOverHttps(tunnelOptions); + } + else { + return tunnel__namespace.httpOverHttp(tunnelOptions); + } +} +function isValidPort(port) { + // any port in 0-65535 range is valid (RFC 793) even though almost all implementations + // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports + return 0 <= port && port <= 65535; +} + // Copyright (c) Microsoft Corporation. const RedactedString = "REDACTED"; const defaultAllowedHeaderNames = [ @@ -21361,7 +21489,7 @@ const defaultAllowedHeaderNames = [ "Retry-After", "Server", "Transfer-Encoding", - "User-Agent" + "User-Agent", ]; const defaultAllowedQueryParameters = ["api-version"]; class Sanitizer { @@ -21454,8 +21582,14 @@ class Sanitizer { } } +// Copyright (c) Microsoft Corporation. +const custom = util.inspect.custom; + // Copyright (c) Microsoft Corporation. const errorSanitizer = new Sanitizer(); +/** + * An error resulting from an HTTP request to a service endpoint. + */ class RestError extends Error { constructor(message, code, statusCode, request, response) { super(message); @@ -21473,13 +21607,22 @@ class RestError extends Error { return `RestError: ${this.message} \n ${errorSanitizer.sanitize(this)}`; } } +/** + * A constant string to identify errors that may arise when making an HTTP request that indicates an issue with the transport layer (e.g. the hostname of the URL cannot be resolved via DNS.) + */ RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; +/** + * A constant string to identify errors that may arise from parsing an incoming HTTP response. Usually indicates a malformed HTTP body, such as an encoded JSON payload that is incomplete. + */ RestError.PARSE_ERROR = "PARSE_ERROR"; // Copyright (c) Microsoft Corporation. const logger = logger$1.createClientLogger("core-http"); // Copyright (c) Microsoft Corporation. +function getCachedAgent(isHttps, agentCache) { + return isHttps ? 
agentCache.httpsAgent : agentCache.httpAgent; +} class ReportTransform extends stream.Transform { constructor(progressCallback) { super(); @@ -21493,7 +21636,44 @@ class ReportTransform extends stream.Transform { callback(undefined); } } -class FetchHttpClient { +function isReadableStream(body) { + return body && typeof body.pipe === "function"; +} +function isStreamComplete(stream, aborter) { + return new Promise((resolve) => { + stream.once("close", () => { + aborter === null || aborter === void 0 ? void 0 : aborter.abort(); + resolve(); + }); + stream.once("end", resolve); + stream.once("error", resolve); + }); +} +/** + * Transforms a set of headers into the key/value pair defined by {@link HttpHeadersLike} + */ +function parseHeaders(headers) { + const httpHeaders = new HttpHeaders(); + headers.forEach((value, key) => { + httpHeaders.set(key, value); + }); + return httpHeaders; +} +/** + * An HTTP client that uses `node-fetch`. + */ +class NodeFetchHttpClient { + constructor() { + // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent + this.proxyAgentMap = new Map(); + this.keepAliveAgents = {}; + this.cookieJar = new tough__namespace.CookieJar(undefined, { looseMode: true }); + } + /** + * Provides minimum viable error handling and the logic that executes the abstract methods. + * @param httpRequest - Object representing the outgoing HTTP request. + * @returns An object representing the incoming HTTP response. + */ async sendRequest(httpRequest) { var _a; if (!httpRequest && typeof httpRequest !== "object") { @@ -21519,7 +21699,7 @@ class FetchHttpClient { } if (httpRequest.formData) { const formData = httpRequest.formData; - const requestForm = new FormData(); + const requestForm = new FormData__default["default"](); const appendFormValue = (key, value) => { // value function probably returns a stream so we can provide a fresh stream on each retry if (typeof value === "function") { @@ -21589,7 +21769,7 @@ class FetchHttpClient { readableStreamBody: streaming ? response.body : undefined, - bodyAsText: !streaming ? await response.text() : undefined + bodyAsText: !streaming ? await response.text() : undefined, }; const onDownloadProgress = httpRequest.onDownloadProgress; if (onDownloadProgress) { @@ -21643,94 +21823,6 @@ class FetchHttpClient { } } } -} -function isReadableStream(body) { - return body && typeof body.pipe === "function"; -} -function isStreamComplete(stream, aborter) { - return new Promise((resolve) => { - stream.once("close", () => { - aborter === null || aborter === void 0 ? void 0 : aborter.abort(); - resolve(); - }); - stream.once("end", resolve); - stream.once("error", resolve); - }); -} -function parseHeaders(headers) { - const httpHeaders = new HttpHeaders(); - headers.forEach((value, key) => { - httpHeaders.set(key, value); - }); - return httpHeaders; -} - -// Copyright (c) Microsoft Corporation. 
-function createProxyAgent(requestUrl, proxySettings, headers) { - const host = URLBuilder.parse(proxySettings.host).getHost(); - if (!host) { - throw new Error("Expecting a non-empty host in proxy settings."); - } - if (!isValidPort(proxySettings.port)) { - throw new Error("Expecting a valid port number in the range of [0, 65535] in proxy settings."); - } - const tunnelOptions = { - proxy: { - host: host, - port: proxySettings.port, - headers: (headers && headers.rawHeaders()) || {} - } - }; - if (proxySettings.username && proxySettings.password) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}:${proxySettings.password}`; - } - else if (proxySettings.username) { - tunnelOptions.proxy.proxyAuth = `${proxySettings.username}`; - } - const isRequestHttps = isUrlHttps(requestUrl); - const isProxyHttps = isUrlHttps(proxySettings.host); - const proxyAgent = { - isHttps: isRequestHttps, - agent: createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) - }; - return proxyAgent; -} -function isUrlHttps(url) { - const urlScheme = URLBuilder.parse(url).getScheme() || ""; - return urlScheme.toLowerCase() === "https"; -} -function createTunnel(isRequestHttps, isProxyHttps, tunnelOptions) { - if (isRequestHttps && isProxyHttps) { - return tunnel.httpsOverHttps(tunnelOptions); - } - else if (isRequestHttps && !isProxyHttps) { - return tunnel.httpsOverHttp(tunnelOptions); - } - else if (!isRequestHttps && isProxyHttps) { - return tunnel.httpOverHttps(tunnelOptions); - } - else { - return tunnel.httpOverHttp(tunnelOptions); - } -} -function isValidPort(port) { - // any port in 0-65535 range is valid (RFC 793) even though almost all implementations - // will reserve 0 for a specific purpose, and a range of numbers for ephemeral ports - return 0 <= port && port <= 65535; -} - -// Copyright (c) Microsoft Corporation. -function getCachedAgent(isHttps, agentCache) { - return isHttps ? agentCache.httpsAgent : agentCache.httpAgent; -} -class NodeFetchHttpClient extends FetchHttpClient { - constructor() { - super(...arguments); - // a mapping of proxy settings string `${host}:${port}:${username}:${password}` to agent - this.proxyAgentMap = new Map(); - this.keepAliveAgents = {}; - this.cookieJar = new tough.CookieJar(undefined, { looseMode: true }); - } getOrCreateAgent(httpRequest) { var _a; const isHttps = isUrlHttps(httpRequest.url); @@ -21762,24 +21854,30 @@ class NodeFetchHttpClient extends FetchHttpClient { return agent; } const agentOptions = { - keepAlive: httpRequest.keepAlive + keepAlive: httpRequest.keepAlive, }; if (isHttps) { - agent = this.keepAliveAgents.httpsAgent = new https.Agent(agentOptions); + agent = this.keepAliveAgents.httpsAgent = new https__namespace.Agent(agentOptions); } else { - agent = this.keepAliveAgents.httpAgent = new http.Agent(agentOptions); + agent = this.keepAliveAgents.httpAgent = new http__namespace.Agent(agentOptions); } return agent; } else { - return isHttps ? https.globalAgent : http.globalAgent; + return isHttps ? https__namespace.globalAgent : http__namespace.globalAgent; } } + /** + * Uses `node-fetch` to perform the request. + */ // eslint-disable-next-line @azure/azure-sdk/ts-apisurface-standardized-verbs async fetch(input, init) { - return node_fetch(input, init); + return node_fetch__default["default"](input, init); } + /** + * Prepares a request based on the provided web resource. 
+ */ async prepareRequest(httpRequest) { const requestInit = {}; if (this.cookieJar && !httpRequest.headers.get("Cookie")) { @@ -21800,6 +21898,9 @@ class NodeFetchHttpClient extends FetchHttpClient { requestInit.compress = httpRequest.decompressResponse; return requestInit; } + /** + * Process an HTTP response. Handles persisting a cookie for subsequent requests if the response has a "Set-Cookie" header. + */ async processRequest(operationResponse) { if (this.cookieJar) { const setCookieHeader = operationResponse.headers.get("Set-Cookie"); @@ -21820,6 +21921,11 @@ class NodeFetchHttpClient extends FetchHttpClient { } // Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The different levels of logs that can be used with the HttpPipelineLogger. + */ +exports.HttpPipelineLogLevel = void 0; (function (HttpPipelineLogLevel) { /** * A log level that indicates that no logs will be logged. @@ -21839,6 +21945,7 @@ class NodeFetchHttpClient extends FetchHttpClient { HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; })(exports.HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = {})); +// Copyright (c) Microsoft Corporation. /** * Converts an OperationOptions to a RequestOptionsBase * @@ -21860,8 +21967,22 @@ function operationOptionsToRequestOptionsBase(opts) { } // Copyright (c) Microsoft Corporation. +/** + * The base class from which all request policies derive. + */ class BaseRequestPolicy { - constructor(_nextPolicy, _options) { + /** + * The main method to implement that manipulates a request/response. + */ + constructor( + /** + * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. + */ + _nextPolicy, + /** + * The options that can be passed to a given request policy. + */ + _options) { this._nextPolicy = _nextPolicy; this._options = _options; } @@ -21913,113 +22034,6 @@ class RequestPolicyOptions { } } -// Copyright (c) Microsoft Corporation. -function logPolicy(loggingOptions = {}) { - return { - create: (nextPolicy, options) => { - return new LogPolicy(nextPolicy, options, loggingOptions); - } - }; -} -class LogPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [] } = {}) { - super(nextPolicy, options); - this.logger = logger$1; - this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - get allowedHeaderNames() { - return this.sanitizer.allowedHeaderNames; - } - /** - * Header names whose values will be logged when logging is enabled. Defaults to - * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers - * specified in this field will be added to that list. Any other values will - * be written to logs as "REDACTED". - * @deprecated Pass these into the constructor instead. - */ - set allowedHeaderNames(allowedHeaderNames) { - this.sanitizer.allowedHeaderNames = allowedHeaderNames; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. 
- * @deprecated Pass these into the constructor instead. - */ - get allowedQueryParameters() { - return this.sanitizer.allowedQueryParameters; - } - /** - * Query string names whose values will be logged when logging is enabled. By default no - * query string values are logged. - * @deprecated Pass these into the constructor instead. - */ - set allowedQueryParameters(allowedQueryParameters) { - this.sanitizer.allowedQueryParameters = allowedQueryParameters; - } - sendRequest(request) { - if (!this.logger.enabled) - return this._nextPolicy.sendRequest(request); - this.logRequest(request); - return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); - } - logRequest(request) { - this.logger(`Request: ${this.sanitizer.sanitize(request)}`); - } - logResponse(response) { - this.logger(`Response status code: ${response.status}`); - this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); - return response; - } -} - -// Copyright (c) Microsoft Corporation. -// Licensed under the MIT license. -/** - * Get the path to this parameter's value as a dotted string (a.b.c). - * @param parameter - The parameter to get the path string for. - * @returns The path to this parameter's value as a dotted string. - */ -function getPathStringFromParameter(parameter) { - return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); -} -function getPathStringFromParameterPath(parameterPath, mapper) { - let result; - if (typeof parameterPath === "string") { - result = parameterPath; - } - else if (Array.isArray(parameterPath)) { - result = parameterPath.join("."); - } - else { - result = mapper.serializedName; - } - return result; -} - -// Copyright (c) Microsoft Corporation. -/** - * Gets the list of status codes for streaming responses. - * @internal - */ -function getStreamResponseStatusCodes(operationSpec) { - const result = new Set(); - for (const statusCode in operationSpec.responses) { - const operationResponse = operationSpec.responses[statusCode]; - if (operationResponse.bodyMapper && - operationResponse.bodyMapper.type.name === MapperType.Stream) { - result.add(Number(statusCode)); - } - } - return result; -} - // Copyright (c) Microsoft Corporation. // Note: The reason we re-define all of the xml2js default settings (version 2.0) here is because the default settings object exposed // by the xm2js library is mutable. See https://github.com/Leonidas-from-XIV/node-xml2js/issues/536 @@ -22052,18 +22066,18 @@ const xml2jsDefaultOptionsV2 = { xmldec: { version: "1.0", encoding: "UTF-8", - standalone: true + standalone: true, }, doctype: undefined, renderOpts: { pretty: true, indent: " ", - newline: "\n" + newline: "\n", }, headless: false, chunkSize: 10000, emptyTag: "", - cdata: false + cdata: false, }; // The xml2js settings for general XML parsing operations. const xml2jsParserSettings = Object.assign({}, xml2jsDefaultOptionsV2); @@ -22072,7 +22086,7 @@ xml2jsParserSettings.explicitArray = false; const xml2jsBuilderSettings = Object.assign({}, xml2jsDefaultOptionsV2); xml2jsBuilderSettings.explicitArray = false; xml2jsBuilderSettings.renderOpts = { - pretty: false + pretty: false, }; /** * Converts given JSON object to XML string @@ -22083,7 +22097,7 @@ function stringifyXML(obj, opts = {}) { var _a; xml2jsBuilderSettings.rootName = opts.rootName; xml2jsBuilderSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? 
_a : XML_CHARKEY; - const builder = new xml2js.Builder(xml2jsBuilderSettings); + const builder = new xml2js__namespace.Builder(xml2jsBuilderSettings); return builder.buildObject(obj); } /** @@ -22095,7 +22109,7 @@ function parseXML(str, opts = {}) { var _a; xml2jsParserSettings.explicitRoot = !!opts.includeRoot; xml2jsParserSettings.charkey = (_a = opts.xmlCharKey) !== null && _a !== void 0 ? _a : XML_CHARKEY; - const xmlParser = new xml2js.Parser(xml2jsParserSettings); + const xmlParser = new xml2js__namespace.Parser(xml2jsParserSettings); return new Promise((resolve, reject) => { if (!str) { reject(new Error("Document is empty")); @@ -22122,7 +22136,7 @@ function deserializationPolicy(deserializationContentTypes, parsingOptions) { return { create: (nextPolicy, options) => { return new DeserializationPolicy(nextPolicy, options, deserializationContentTypes, parsingOptions); - } + }, }; } const defaultJsonContentTypes = ["application/json", "text/json"]; @@ -22130,8 +22144,8 @@ const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; const DefaultDeserializationOptions = { expectedContentTypes: { json: defaultJsonContentTypes, - xml: defaultXmlContentTypes - } + xml: defaultXmlContentTypes, + }, }; /** * A RequestPolicy that will deserialize HTTP response bodies and headers as they pass through the @@ -22149,7 +22163,7 @@ class DeserializationPolicy extends BaseRequestPolicy { } async sendRequest(request) { return this._nextPolicy.sendRequest(request).then((response) => deserializeResponseBody(this.jsonContentTypes, this.xmlContentTypes, response, { - xmlCharKey: this.xmlCharKey + xmlCharKey: this.xmlCharKey, })); } } @@ -22182,12 +22196,20 @@ function shouldDeserializeResponse(parsedResponse) { } return result; } +/** + * Given a particular set of content types to parse as either JSON or XML, consumes the HTTP response to produce the result object defined by the request's {@link OperationSpec}. + * @param jsonContentTypes - Response content types to parse the body as JSON. + * @param xmlContentTypes - Response content types to parse the body as XML. + * @param response - HTTP Response from the pipeline. + * @param options - Options to the serializer, mostly for configuring the XML parser if needed. + * @returns A parsed {@link HttpOperationResponse} object that can be returned by the {@link ServiceClient}. + */ function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options = {}) { var _a, _b, _c; const updatedOptions = { rootName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.includeRoot) !== null && _b !== void 0 ? _b : false, - xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY + xmlCharKey: (_c = options.xmlCharKey) !== null && _c !== void 0 ? _c : XML_CHARKEY, }; return parse(jsonContentTypes, xmlContentTypes, response, updatedOptions).then((parsedResponse) => { if (!shouldDeserializeResponse(parsedResponse)) { @@ -22338,6 +22360,113 @@ function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts) { return Promise.resolve(operationResponse); } +// Copyright (c) Microsoft Corporation. +/** + * By default, HTTP connections are maintained for future requests. + */ +const DefaultKeepAliveOptions = { + enable: true, +}; +/** + * Creates a policy that controls whether HTTP connections are maintained on future requests. + * @param keepAliveOptions - Keep alive options. By default, HTTP connections are maintained for future requests. 
+ * @returns An instance of the {@link KeepAlivePolicy} + */ +function keepAlivePolicy(keepAliveOptions) { + return { + create: (nextPolicy, options) => { + return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); + }, + }; +} +/** + * KeepAlivePolicy is a policy used to control keep alive settings for every request. + */ +class KeepAlivePolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + * + * @param nextPolicy - + * @param options - + * @param keepAliveOptions - + */ + constructor(nextPolicy, options, keepAliveOptions) { + super(nextPolicy, options); + this.keepAliveOptions = keepAliveOptions; + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.keepAlive = this.keepAliveOptions.enable; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Methods that are allowed to follow redirects 301 and 302 + */ +const allowedRedirect = ["GET", "HEAD"]; +const DefaultRedirectOptions = { + handleRedirects: true, + maxRetries: 20, +}; +/** + * Creates a redirect policy, which sends a repeats the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + * @param maximumRetries - Maximum number of redirects to follow. + * @returns An instance of the {@link RedirectPolicy} + */ +function redirectPolicy(maximumRetries = 20) { + return { + create: (nextPolicy, options) => { + return new RedirectPolicy(nextPolicy, options, maximumRetries); + }, + }; +} +/** + * Resends the request to a new destination if a response arrives with a "location" header, and a status code between 300 and 307. + */ +class RedirectPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, maxRetries = 20) { + super(nextPolicy, options); + this.maxRetries = maxRetries; + } + sendRequest(request) { + return this._nextPolicy + .sendRequest(request) + .then((response) => handleRedirect(this, response, 0)); + } +} +function handleRedirect(policy, response, currentRetries) { + const { request, status } = response; + const locationHeader = response.headers.get("location"); + if (locationHeader && + (status === 300 || + (status === 301 && allowedRedirect.includes(request.method)) || + (status === 302 && allowedRedirect.includes(request.method)) || + (status === 303 && request.method === "POST") || + status === 307) && + (!policy.maxRetries || currentRetries < policy.maxRetries)) { + const builder = URLBuilder.parse(request.url); + builder.setPath(locationHeader); + request.url = builder.toString(); + // POST request with Status code 303 should be converted into a + // redirected GET request if the redirect url is present in the location header + if (status === 303) { + request.method = "GET"; + delete request.body; + } + return policy._nextPolicy + .sendRequest(request) + .then((res) => handleRedirect(policy, res, currentRetries + 1)); + } + return Promise.resolve(response); +} + // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. const DEFAULT_CLIENT_RETRY_COUNT = 3; @@ -22401,7 +22530,7 @@ function isDefined(thing) { } // Copyright (c) Microsoft Corporation. -const StandardAbortMessage = "The operation was aborted."; +const StandardAbortMessage$1 = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. * @param delayInMs - The number of milliseconds to be delayed. 
@@ -22416,7 +22545,7 @@ function delay(delayInMs, value, options) { let timer = undefined; let onAborted = undefined; const rejectOnAbort = () => { - return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); + return reject(new abortController.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage$1)); }; const removeListeners = () => { if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { @@ -22444,20 +22573,34 @@ function delay(delayInMs, value, options) { } // Copyright (c) Microsoft Corporation. +/** + * Policy that retries the request as many times as configured for as long as the max retry time interval specified, each retry waiting longer to begin than the last time. + * @param retryCount - Maximum number of retries. + * @param retryInterval - Base time between retries. + * @param maxRetryInterval - Maximum time to wait between retries. + */ function exponentialRetryPolicy(retryCount, retryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new ExponentialRetryPolicy(nextPolicy, options, retryCount, retryInterval, maxRetryInterval); - } + }, }; } +/** + * Describes the Retry Mode type. Currently supporting only Exponential. + */ +exports.RetryMode = void 0; (function (RetryMode) { + /** + * Currently supported retry mode. + * Each time a retry happens, it will take exponentially more time than the last time. + */ RetryMode[RetryMode["Exponential"] = 0] = "Exponential"; })(exports.RetryMode || (exports.RetryMode = {})); const DefaultRetryOptions = { maxRetries: DEFAULT_CLIENT_RETRY_COUNT, retryDelayInMs: DEFAULT_CLIENT_RETRY_INTERVAL, - maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL + maxRetryDelayInMs: DEFAULT_CLIENT_MAX_RETRY_INTERVAL, }; /** * Instantiates a new "ExponentialRetryPolicyFilter" instance. @@ -22482,11 +22625,11 @@ class ExponentialRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .then((response) => retry(this, request, response)) - .catch((error) => retry(this, request, error.response, undefined, error)); + .then((response) => retry$1(this, request, response)) + .catch((error) => retry$1(this, request, error.response, undefined, error)); } } -async function retry(policy, request, response, retryData, requestError) { +async function retry$1(policy, request, response, retryData, requestError) { function shouldPolicyRetry(responseParam) { const statusCode = responseParam === null || responseParam === void 0 ? void 0 : responseParam.status; if (statusCode === 503 && (response === null || response === void 0 ? 
void 0 : response.headers.get(Constants.HeaderConstants.RETRY_AFTER))) { @@ -22503,7 +22646,7 @@ async function retry(policy, request, response, retryData, requestError) { retryData = updateRetryData({ retryInterval: policy.retryInterval, minRetryInterval: 0, - maxRetryInterval: policy.maxRetryInterval + maxRetryInterval: policy.maxRetryInterval, }, retryData, requestError); const isAborted = request.abortSignal && request.abortSignal.aborted; if (!isAborted && shouldRetry(policy.retryCount, shouldPolicyRetry, retryData, response)) { @@ -22511,10 +22654,10 @@ async function retry(policy, request, response, retryData, requestError) { try { await delay(retryData.retryInterval); const res = await policy._nextPolicy.sendRequest(request.clone()); - return retry(policy, request, res, retryData); + return retry$1(policy, request, res, retryData); } catch (err) { - return retry(policy, request, response, retryData, err); + return retry$1(policy, request, response, retryData, err); } } else if (isAborted || requestError || !response) { @@ -22529,11 +22672,467 @@ async function retry(policy, request, response, retryData, requestError) { } // Copyright (c) Microsoft Corporation. +/** + * Creates a policy that logs information about the outgoing request and the incoming responses. + * @param loggingOptions - Logging options. + * @returns An instance of the {@link LogPolicy} + */ +function logPolicy(loggingOptions = {}) { + return { + create: (nextPolicy, options) => { + return new LogPolicy(nextPolicy, options, loggingOptions); + }, + }; +} +/** + * A policy that logs information about the outgoing request and the incoming responses. + */ +class LogPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, { logger: logger$1 = logger.info, allowedHeaderNames = [], allowedQueryParameters = [], } = {}) { + super(nextPolicy, options); + this.logger = logger$1; + this.sanitizer = new Sanitizer({ allowedHeaderNames, allowedQueryParameters }); + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + get allowedHeaderNames() { + return this.sanitizer.allowedHeaderNames; + } + /** + * Header names whose values will be logged when logging is enabled. Defaults to + * Date, traceparent, x-ms-client-request-id, and x-ms-request id. Any headers + * specified in this field will be added to that list. Any other values will + * be written to logs as "REDACTED". + * @deprecated Pass these into the constructor instead. + */ + set allowedHeaderNames(allowedHeaderNames) { + this.sanitizer.allowedHeaderNames = allowedHeaderNames; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. + */ + get allowedQueryParameters() { + return this.sanitizer.allowedQueryParameters; + } + /** + * Query string names whose values will be logged when logging is enabled. By default no + * query string values are logged. + * @deprecated Pass these into the constructor instead. 
+ */ + set allowedQueryParameters(allowedQueryParameters) { + this.sanitizer.allowedQueryParameters = allowedQueryParameters; + } + sendRequest(request) { + if (!this.logger.enabled) + return this._nextPolicy.sendRequest(request); + this.logRequest(request); + return this._nextPolicy.sendRequest(request).then((response) => this.logResponse(response)); + } + logRequest(request) { + this.logger(`Request: ${this.sanitizer.sanitize(request)}`); + } + logResponse(response) { + this.logger(`Response status code: ${response.status}`); + this.logger(`Headers: ${this.sanitizer.sanitize(response.headers)}`); + return response; + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * Get the path to this parameter's value as a dotted string (a.b.c). + * @param parameter - The parameter to get the path string for. + * @returns The path to this parameter's value as a dotted string. + */ +function getPathStringFromParameter(parameter) { + return getPathStringFromParameterPath(parameter.parameterPath, parameter.mapper); +} +function getPathStringFromParameterPath(parameterPath, mapper) { + let result; + if (typeof parameterPath === "string") { + result = parameterPath; + } + else if (Array.isArray(parameterPath)) { + result = parameterPath.join("."); + } + else { + result = mapper.serializedName; + } + return result; +} + +// Copyright (c) Microsoft Corporation. +/** + * Gets the list of status codes for streaming responses. + * @internal + */ +function getStreamResponseStatusCodes(operationSpec) { + const result = new Set(); + for (const statusCode in operationSpec.responses) { + const operationResponse = operationSpec.responses[statusCode]; + if (operationResponse.bodyMapper && + operationResponse.bodyMapper.type.name === MapperType.Stream) { + result.add(Number(statusCode)); + } + } + return result; +} + +// Copyright (c) Microsoft Corporation. +function getDefaultUserAgentKey() { + return Constants.HeaderConstants.USER_AGENT; +} +function getPlatformSpecificData() { + const runtimeInfo = { + key: "Node", + value: process.version, + }; + const osInfo = { + key: "OS", + value: `(${os__namespace.arch()}-${os__namespace.type()}-${os__namespace.release()})`, + }; + return [runtimeInfo, osInfo]; +} + +// Copyright (c) Microsoft Corporation. +function getRuntimeInfo() { + const msRestRuntime = { + key: "core-http", + value: Constants.coreHttpVersion, + }; + return [msRestRuntime]; +} +function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { + return telemetryInfo + .map((info) => { + const value = info.value ? `${valueSeparator}${info.value}` : ""; + return `${info.key}${value}`; + }) + .join(keySeparator); +} +const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; +/** + * The default approach to generate user agents. + * Uses static information from this package, plus system information available from the runtime. + */ +function getDefaultUserAgentValue() { + const runtimeInfo = getRuntimeInfo(); + const platformSpecificData = getPlatformSpecificData(); + const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); + return userAgent; +} +/** + * Returns a policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + * @param userAgentData - Telemetry information. + * @returns A new {@link UserAgentPolicy}. + */ +function userAgentPolicy(userAgentData) { + const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null + ? 
getDefaultUserAgentKey() + : userAgentData.key; + const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null + ? getDefaultUserAgentValue() + : userAgentData.value; + return { + create: (nextPolicy, options) => { + return new UserAgentPolicy(nextPolicy, options, key, value); + }, + }; +} +/** + * A policy that adds the user agent header to outgoing requests based on the given {@link TelemetryInfo}. + */ +class UserAgentPolicy extends BaseRequestPolicy { + constructor(_nextPolicy, _options, headerKey, headerValue) { + super(_nextPolicy, _options); + this._nextPolicy = _nextPolicy; + this._options = _options; + this.headerKey = headerKey; + this.headerValue = headerValue; + } + sendRequest(request) { + this.addUserAgentHeader(request); + return this._nextPolicy.sendRequest(request); + } + /** + * Adds the user agent header to the outgoing request. + */ + addUserAgentHeader(request) { + if (!request.headers) { + request.headers = new HttpHeaders(); + } + if (!request.headers.get(this.headerKey) && this.headerValue) { + request.headers.set(this.headerKey, this.headerValue); + } + } +} + +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. +/** + * The format that will be used to join an array of values together for a query parameter value. + */ +exports.QueryCollectionFormat = void 0; +(function (QueryCollectionFormat) { + /** + * CSV: Each pair of segments joined by a single comma. + */ + QueryCollectionFormat["Csv"] = ","; + /** + * SSV: Each pair of segments joined by a single space character. + */ + QueryCollectionFormat["Ssv"] = " "; + /** + * TSV: Each pair of segments joined by a single tab character. + */ + QueryCollectionFormat["Tsv"] = "\t"; + /** + * Pipes: Each pair of segments joined by a single pipe character. + */ + QueryCollectionFormat["Pipes"] = "|"; + /** + * Denotes this is an array of values that should be passed to the server in multiple key/value pairs, e.g. `?queryParam=value1&queryParam=value2` + */ + QueryCollectionFormat["Multi"] = "Multi"; +})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); + +// Copyright (c) Microsoft Corporation. +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. 
+ async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. + */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. 
+ // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. + */ +function bearerTokenAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + const getToken = createTokenCycler(credential, scopes /* , options */); + class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const { token } = await getToken({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + }); + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return { + create: (nextPolicy, options) => { + return new BearerTokenAuthenticationPolicy(nextPolicy, options); + }, + }; +} + +// Copyright (c) Microsoft Corporation. +/** + * Returns a request policy factory that can be used to create an instance of + * {@link DisableResponseDecompressionPolicy}. + */ +function disableResponseDecompressionPolicy() { + return { + create: (nextPolicy, options) => { + return new DisableResponseDecompressionPolicy(nextPolicy, options); + }, + }; +} +/** + * A policy to disable response decompression according to Accept-Encoding header + * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding + */ +class DisableResponseDecompressionPolicy extends BaseRequestPolicy { + /** + * Creates an instance of DisableResponseDecompressionPolicy. + * + * @param nextPolicy - + * @param options - + */ + // The parent constructor is protected. + /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + /** + * Sends out request. + * + * @param request - + * @returns + */ + async sendRequest(request) { + request.decompressResponse = false; + return this._nextPolicy.sendRequest(request); + } +} + +// Copyright (c) Microsoft Corporation. +/** + * Creates a policy that assigns a unique request id to outgoing requests. + * @param requestIdHeaderName - The name of the header to use when assigning the unique id to the request. + */ function generateClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { return { create: (nextPolicy, options) => { return new GenerateClientRequestIdPolicy(nextPolicy, options, requestIdHeaderName); - } + }, }; } class GenerateClientRequestIdPolicy extends BaseRequestPolicy { @@ -22550,130 +23149,190 @@ class GenerateClientRequestIdPolicy extends BaseRequestPolicy { } // Copyright (c) Microsoft Corporation. 
-function getDefaultUserAgentKey() { - return Constants.HeaderConstants.USER_AGENT; -} -function getPlatformSpecificData() { - const runtimeInfo = { - key: "Node", - value: process.version - }; - const osInfo = { - key: "OS", - value: `(${os.arch()}-${os.type()}-${os.release()})` - }; - return [runtimeInfo, osInfo]; +let cachedHttpClient; +function getCachedDefaultHttpClient() { + if (!cachedHttpClient) { + cachedHttpClient = new NodeFetchHttpClient(); + } + return cachedHttpClient; } // Copyright (c) Microsoft Corporation. -function getRuntimeInfo() { - const msRestRuntime = { - key: "core-http", - value: Constants.coreHttpVersion - }; - return [msRestRuntime]; -} -function getUserAgentString(telemetryInfo, keySeparator = " ", valueSeparator = "/") { - return telemetryInfo - .map((info) => { - const value = info.value ? `${valueSeparator}${info.value}` : ""; - return `${info.key}${value}`; - }) - .join(keySeparator); -} -const getDefaultUserAgentHeaderName = getDefaultUserAgentKey; -function getDefaultUserAgentValue() { - const runtimeInfo = getRuntimeInfo(); - const platformSpecificData = getPlatformSpecificData(); - const userAgent = getUserAgentString(runtimeInfo.concat(platformSpecificData)); - return userAgent; -} -function userAgentPolicy(userAgentData) { - const key = !userAgentData || userAgentData.key === undefined || userAgentData.key === null - ? getDefaultUserAgentKey() - : userAgentData.key; - const value = !userAgentData || userAgentData.value === undefined || userAgentData.value === null - ? getDefaultUserAgentValue() - : userAgentData.value; +function ndJsonPolicy() { return { create: (nextPolicy, options) => { - return new UserAgentPolicy(nextPolicy, options, key, value); - } + return new NdJsonPolicy(nextPolicy, options); + }, }; } -class UserAgentPolicy extends BaseRequestPolicy { - constructor(_nextPolicy, _options, headerKey, headerValue) { - super(_nextPolicy, _options); - this._nextPolicy = _nextPolicy; - this._options = _options; - this.headerKey = headerKey; - this.headerValue = headerValue; +/** + * NdJsonPolicy that formats a JSON array as newline-delimited JSON + */ +class NdJsonPolicy extends BaseRequestPolicy { + /** + * Creates an instance of KeepAlivePolicy. + */ + constructor(nextPolicy, options) { + super(nextPolicy, options); } - sendRequest(request) { - this.addUserAgentHeader(request); + /** + * Sends a request. + */ + async sendRequest(request) { + // There currently isn't a good way to bypass the serializer + if (typeof request.body === "string" && request.body.startsWith("[")) { + const body = JSON.parse(request.body); + if (Array.isArray(body)) { + request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); + } + } return this._nextPolicy.sendRequest(request); } - addUserAgentHeader(request) { - if (!request.headers) { - request.headers = new HttpHeaders(); - } - if (!request.headers.get(this.headerKey) && this.headerValue) { - request.headers.set(this.headerKey, this.headerValue); - } - } } // Copyright (c) Microsoft Corporation. /** - * Methods that are allowed to follow redirects 301 and 302 + * Stores the patterns specified in NO_PROXY environment variable. 
+ * @internal */ -const allowedRedirect = ["GET", "HEAD"]; -const DefaultRedirectOptions = { - handleRedirects: true, - maxRetries: 20 -}; -function redirectPolicy(maximumRetries = 20) { - return { - create: (nextPolicy, options) => { - return new RedirectPolicy(nextPolicy, options, maximumRetries); +const globalNoProxyList = []; +let noProxyListLoaded = false; +/** A cache of whether a host should bypass the proxy. */ +const globalBypassedMap = new Map(); +function loadEnvironmentProxyValue() { + if (!process) { + return undefined; + } + const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); + const allProxy = getEnvironmentValue(Constants.ALL_PROXY); + const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); + return httpsProxy || allProxy || httpProxy; +} +/** + * Check whether the host of a given `uri` matches any pattern in the no proxy list. + * If there's a match, any request sent to the same host shouldn't have the proxy settings set. + * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 + */ +function isBypassed(uri, noProxyList, bypassedMap) { + if (noProxyList.length === 0) { + return false; + } + const host = URLBuilder.parse(uri).getHost(); + if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { + return bypassedMap.get(host); + } + let isBypassedFlag = false; + for (const pattern of noProxyList) { + if (pattern[0] === ".") { + // This should match either domain it self or any subdomain or host + // .foo.com will match foo.com it self or *.foo.com + if (host.endsWith(pattern)) { + isBypassedFlag = true; + } + else { + if (host.length === pattern.length - 1 && host === pattern.slice(1)) { + isBypassedFlag = true; + } + } } + else { + if (host === pattern) { + isBypassedFlag = true; + } + } + } + bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); + return isBypassedFlag; +} +/** + * @internal + */ +function loadNoProxy() { + const noProxy = getEnvironmentValue(Constants.NO_PROXY); + noProxyListLoaded = true; + if (noProxy) { + return noProxy + .split(",") + .map((item) => item.trim()) + .filter((item) => item.length); + } + return []; +} +/** + * Converts a given URL of a proxy server into `ProxySettings` or attempts to retrieve `ProxySettings` from the current environment if one is not passed. + * @param proxyUrl - URL of the proxy + * @returns The default proxy settings, or undefined. + */ +function getDefaultProxySettings(proxyUrl) { + if (!proxyUrl) { + proxyUrl = loadEnvironmentProxyValue(); + if (!proxyUrl) { + return undefined; + } + } + const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); + const parsedUrl = URLBuilder.parse(urlWithoutAuth); + const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; + return { + host: schema + parsedUrl.getHost(), + port: Number.parseInt(parsedUrl.getPort() || "80"), + username, + password, }; } -class RedirectPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, maxRetries = 20) { +/** + * A policy that allows one to apply proxy settings to all requests. + * If not passed static settings, they will be retrieved from the HTTPS_PROXY + * or HTTP_PROXY environment variables. + * @param proxySettings - ProxySettings to use on each request. 
+ * @param options - additional settings, for example, custom NO_PROXY patterns + */ +function proxyPolicy(proxySettings, options) { + if (!proxySettings) { + proxySettings = getDefaultProxySettings(); + } + if (!noProxyListLoaded) { + globalNoProxyList.push(...loadNoProxy()); + } + return { + create: (nextPolicy, requestPolicyOptions) => { + return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); + }, + }; +} +function extractAuthFromUrl(url) { + const atIndex = url.indexOf("@"); + if (atIndex === -1) { + return { urlWithoutAuth: url }; + } + const schemeIndex = url.indexOf("://"); + const authStart = schemeIndex !== -1 ? schemeIndex + 3 : 0; + const auth = url.substring(authStart, atIndex); + const colonIndex = auth.indexOf(":"); + const hasPassword = colonIndex !== -1; + const username = hasPassword ? auth.substring(0, colonIndex) : auth; + const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; + const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); + return { + username, + password, + urlWithoutAuth, + }; +} +class ProxyPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, proxySettings, customNoProxyList) { super(nextPolicy, options); - this.maxRetries = maxRetries; + this.proxySettings = proxySettings; + this.customNoProxyList = customNoProxyList; } sendRequest(request) { - return this._nextPolicy - .sendRequest(request) - .then((response) => handleRedirect(this, response, 0)); - } -} -function handleRedirect(policy, response, currentRetries) { - const { request, status } = response; - const locationHeader = response.headers.get("location"); - if (locationHeader && - (status === 300 || - (status === 301 && allowedRedirect.includes(request.method)) || - (status === 302 && allowedRedirect.includes(request.method)) || - (status === 303 && request.method === "POST") || - status === 307) && - (!policy.maxRetries || currentRetries < policy.maxRetries)) { - const builder = URLBuilder.parse(request.url); - builder.setPath(locationHeader); - request.url = builder.toString(); - // POST request with Status code 303 should be converted into a - // redirected GET request if the redirect url is present in the location header - if (status === 303) { - request.method = "GET"; - delete request.body; + var _a; + if (!request.proxySettings && + !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { + request.proxySettings = this.proxySettings; } - return policy._nextPolicy - .sendRequest(request) - .then((res) => handleRedirect(policy, res, currentRetries + 1)); + return this._nextPolicy.sendRequest(request); } - return Promise.resolve(response); } // Copyright (c) Microsoft Corporation. @@ -22681,7 +23340,7 @@ function rpRegistrationPolicy(retryTimeout = 30) { return { create: (nextPolicy, options) => { return new RPRegistrationPolicy(nextPolicy, options, retryTimeout); - } + }, }; } class RPRegistrationPolicy extends BaseRequestPolicy { @@ -22826,193 +23485,52 @@ async function getRegistrationStatus(policy, url, originalRequest) { } // Copyright (c) Microsoft Corporation. 
-// Default options for the cycler if none are provided -const DEFAULT_CYCLER_OPTIONS = { - forcedRefreshWindowInMs: 1000, - retryIntervalInMs: 3000, - refreshWindowInMs: 1000 * 60 * 2 // Start refreshing 2m before expiry -}; /** - * Converts an an unreliable access token getter (which may resolve with null) - * into an AccessTokenGetter by retrying the unreliable getter in a regular - * interval. - * - * @param getAccessToken - a function that produces a promise of an access - * token that may fail by returning null - * @param retryIntervalInMs - the time (in milliseconds) to wait between retry - * attempts - * @param timeoutInMs - the timestamp after which the refresh attempt will fail, - * throwing an exception - * @returns - a promise that, if it resolves, will resolve with an access token + * Creates a policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. + * @param authenticationProvider - The authentication provider. + * @returns An instance of the {@link SigningPolicy}. */ -async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { - // This wrapper handles exceptions gracefully as long as we haven't exceeded - // the timeout. - async function tryGetAccessToken() { - if (Date.now() < timeoutInMs) { - try { - return await getAccessToken(); - } - catch (_a) { - return null; - } - } - else { - const finalToken = await getAccessToken(); - // Timeout is up, so throw if it's still null - if (finalToken === null) { - throw new Error("Failed to refresh access token."); - } - return finalToken; - } - } - let token = await tryGetAccessToken(); - while (token === null) { - await delay(retryIntervalInMs); - token = await tryGetAccessToken(); - } - return token; -} -/** - * Creates a token cycler from a credential, scopes, and optional settings. - * - * A token cycler represents a way to reliably retrieve a valid access token - * from a TokenCredential. It will handle initializing the token, refreshing it - * when it nears expiration, and synchronizes refresh attempts to avoid - * concurrency hazards. - * - * @param credential - the underlying TokenCredential that provides the access - * token - * @param scopes - the scopes to request authorization for - * @param tokenCyclerOptions - optionally override default settings for the cycler - * - * @returns - a function that reliably produces a valid access token - */ -function createTokenCycler(credential, scopes, tokenCyclerOptions) { - let refreshWorker = null; - let token = null; - const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); - /** - * This little holder defines several predicates that we use to construct - * the rules of refreshing the token. - */ - const cycler = { - /** - * Produces true if a refresh job is currently in progress. - */ - get isRefreshing() { - return refreshWorker !== null; - }, - /** - * Produces true if the cycler SHOULD refresh (we are within the refresh - * window and not already refreshing) - */ - get shouldRefresh() { - var _a; - return (!cycler.isRefreshing && - ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); - }, - /** - * Produces true if the cycler MUST refresh (null or nearly-expired - * token). 
- */ - get mustRefresh() { - return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); - } - }; - /** - * Starts a refresh job or returns the existing job if one is already - * running. - */ - function refresh(getTokenOptions) { - var _a; - if (!cycler.isRefreshing) { - // We bind `scopes` here to avoid passing it around a lot - const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); - // Take advantage of promise chaining to insert an assignment to `token` - // before the refresh can be considered done. - refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, - // If we don't have a token, then we should timeout immediately - (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) - .then((_token) => { - refreshWorker = null; - token = _token; - return token; - }) - .catch((reason) => { - // We also should reset the refresher if we enter a failed state. All - // existing awaiters will throw, but subsequent requests will start a - // new retry chain. - refreshWorker = null; - token = null; - throw reason; - }); - } - return refreshWorker; - } - return async (tokenOptions) => { - // - // Simple rules: - // - If we MUST refresh, then return the refresh task, blocking - // the pipeline until a token is available. - // - If we SHOULD refresh, then run refresh but don't return it - // (we can still use the cached token). - // - Return the token, since it's fine if we didn't return in - // step 1. - // - if (cycler.mustRefresh) - return refresh(tokenOptions); - if (cycler.shouldRefresh) { - refresh(tokenOptions); - } - return token; - }; -} -// #endregion -/** - * Creates a new factory for a RequestPolicy that applies a bearer token to - * the requests' `Authorization` headers. - * - * @param credential - The TokenCredential implementation that can supply the bearer token. - * @param scopes - The scopes for which the bearer token applies. - */ -function bearerTokenAuthenticationPolicy(credential, scopes) { - // This simple function encapsulates the entire process of reliably retrieving the token - const getToken = createTokenCycler(credential, scopes /* , options */); - class BearerTokenAuthenticationPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - async sendRequest(webResource) { - if (!webResource.url.toLowerCase().startsWith("https://")) { - throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); - } - const { token } = await getToken({ - abortSignal: webResource.abortSignal, - tracingOptions: { - tracingContext: webResource.tracingContext - } - }); - webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); - return this._nextPolicy.sendRequest(webResource); - } - } +function signingPolicy(authenticationProvider) { return { create: (nextPolicy, options) => { - return new BearerTokenAuthenticationPolicy(nextPolicy, options); - } + return new SigningPolicy(nextPolicy, options, authenticationProvider); + }, }; } +/** + * A policy that signs outgoing requests by calling to the provided `authenticationProvider`'s `signRequest` method. 
+ */ +class SigningPolicy extends BaseRequestPolicy { + constructor(nextPolicy, options, authenticationProvider) { + super(nextPolicy, options); + this.authenticationProvider = authenticationProvider; + } + signRequest(request) { + return this.authenticationProvider.signRequest(request); + } + sendRequest(request) { + return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); + } +} // Copyright (c) Microsoft Corporation. +/** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". + * @param retryCount - Maximum number of retries. + * @param retryInterval - The client retry interval, in milliseconds. + * @param minRetryInterval - The minimum retry interval, in milliseconds. + * @param maxRetryInterval - The maximum retry interval, in milliseconds. + * @returns An instance of the {@link SystemErrorRetryPolicy} + */ function systemErrorRetryPolicy(retryCount, retryInterval, minRetryInterval, maxRetryInterval) { return { create: (nextPolicy, options) => { return new SystemErrorRetryPolicy(nextPolicy, options, retryCount, retryInterval, minRetryInterval, maxRetryInterval); - } + }, }; } /** + * A policy that retries when there's a system error, identified by the codes "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET" or "ENOENT". * @param retryCount - The client retry count. * @param retryInterval - The client retry interval, in milliseconds. * @param minRetryInterval - The minimum retry interval, in milliseconds. @@ -23033,10 +23551,10 @@ class SystemErrorRetryPolicy extends BaseRequestPolicy { sendRequest(request) { return this._nextPolicy .sendRequest(request.clone()) - .catch((error) => retry$1(this, request, error.response, error)); + .catch((error) => retry(this, request, error.response, error)); } } -async function retry$1(policy, request, operationResponse, err, retryData) { +async function retry(policy, request, operationResponse, err, retryData) { retryData = updateRetryData(policy, retryData, err); function shouldPolicyRetry(_response, error) { if (error && @@ -23057,7 +23575,7 @@ async function retry$1(policy, request, operationResponse, err, retryData) { return policy._nextPolicy.sendRequest(request.clone()); } catch (nestedErr) { - return retry$1(policy, request, operationResponse, nestedErr, retryData); + return retry(policy, request, operationResponse, nestedErr, retryData); } } else { @@ -23069,155 +23587,6 @@ async function retry$1(policy, request, operationResponse, err, retryData) { } } -// Copyright (c) Microsoft Corporation. -(function (QueryCollectionFormat) { - QueryCollectionFormat["Csv"] = ","; - QueryCollectionFormat["Ssv"] = " "; - QueryCollectionFormat["Tsv"] = "\t"; - QueryCollectionFormat["Pipes"] = "|"; - QueryCollectionFormat["Multi"] = "Multi"; -})(exports.QueryCollectionFormat || (exports.QueryCollectionFormat = {})); - -// Copyright (c) Microsoft Corporation. -/** - * Stores the patterns specified in NO_PROXY environment variable. - * @internal - */ -const globalNoProxyList = []; -let noProxyListLoaded = false; -/** A cache of whether a host should bypass the proxy. 
*/ -const globalBypassedMap = new Map(); -function loadEnvironmentProxyValue() { - if (!process) { - return undefined; - } - const httpsProxy = getEnvironmentValue(Constants.HTTPS_PROXY); - const allProxy = getEnvironmentValue(Constants.ALL_PROXY); - const httpProxy = getEnvironmentValue(Constants.HTTP_PROXY); - return httpsProxy || allProxy || httpProxy; -} -/** - * Check whether the host of a given `uri` matches any pattern in the no proxy list. - * If there's a match, any request sent to the same host shouldn't have the proxy settings set. - * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 - */ -function isBypassed(uri, noProxyList, bypassedMap) { - if (noProxyList.length === 0) { - return false; - } - const host = URLBuilder.parse(uri).getHost(); - if (bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.has(host)) { - return bypassedMap.get(host); - } - let isBypassedFlag = false; - for (const pattern of noProxyList) { - if (pattern[0] === ".") { - // This should match either domain it self or any subdomain or host - // .foo.com will match foo.com it self or *.foo.com - if (host.endsWith(pattern)) { - isBypassedFlag = true; - } - else { - if (host.length === pattern.length - 1 && host === pattern.slice(1)) { - isBypassedFlag = true; - } - } - } - else { - if (host === pattern) { - isBypassedFlag = true; - } - } - } - bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); - return isBypassedFlag; -} -/** - * @internal - */ -function loadNoProxy() { - const noProxy = getEnvironmentValue(Constants.NO_PROXY); - noProxyListLoaded = true; - if (noProxy) { - return noProxy - .split(",") - .map((item) => item.trim()) - .filter((item) => item.length); - } - return []; -} -function getDefaultProxySettings(proxyUrl) { - if (!proxyUrl) { - proxyUrl = loadEnvironmentProxyValue(); - if (!proxyUrl) { - return undefined; - } - } - const { username, password, urlWithoutAuth } = extractAuthFromUrl(proxyUrl); - const parsedUrl = URLBuilder.parse(urlWithoutAuth); - const schema = parsedUrl.getScheme() ? parsedUrl.getScheme() + "://" : ""; - return { - host: schema + parsedUrl.getHost(), - port: Number.parseInt(parsedUrl.getPort() || "80"), - username, - password - }; -} -/** - * A policy that allows one to apply proxy settings to all requests. - * If not passed static settings, they will be retrieved from the HTTPS_PROXY - * or HTTP_PROXY environment variables. - * @param proxySettings - ProxySettings to use on each request. - * @param options - additional settings, for example, custom NO_PROXY patterns - */ -function proxyPolicy(proxySettings, options) { - if (!proxySettings) { - proxySettings = getDefaultProxySettings(); - } - if (!noProxyListLoaded) { - globalNoProxyList.push(...loadNoProxy()); - } - return { - create: (nextPolicy, requestPolicyOptions) => { - return new ProxyPolicy(nextPolicy, requestPolicyOptions, proxySettings, options === null || options === void 0 ? void 0 : options.customNoProxyList); - } - }; -} -function extractAuthFromUrl(url) { - const atIndex = url.indexOf("@"); - if (atIndex === -1) { - return { urlWithoutAuth: url }; - } - const schemeIndex = url.indexOf("://"); - const authStart = schemeIndex !== -1 ? 
schemeIndex + 3 : 0; - const auth = url.substring(authStart, atIndex); - const colonIndex = auth.indexOf(":"); - const hasPassword = colonIndex !== -1; - const username = hasPassword ? auth.substring(0, colonIndex) : auth; - const password = hasPassword ? auth.substring(colonIndex + 1) : undefined; - const urlWithoutAuth = url.substring(0, authStart) + url.substring(atIndex + 1); - return { - username, - password, - urlWithoutAuth - }; -} -class ProxyPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, proxySettings, customNoProxyList) { - super(nextPolicy, options); - this.proxySettings = proxySettings; - this.customNoProxyList = customNoProxyList; - } - sendRequest(request) { - var _a; - if (!request.proxySettings && - !isBypassed(request.url, (_a = this.customNoProxyList) !== null && _a !== void 0 ? _a : globalNoProxyList, this.customNoProxyList ? undefined : globalBypassedMap)) { - request.proxySettings = this.proxySettings; - } - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /** @@ -23227,15 +23596,28 @@ const DEFAULT_CLIENT_MAX_RETRY_COUNT = 3; // Copyright (c) Microsoft Corporation. const StatusCodes = Constants.HttpConstants.StatusCodes; +/** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * + * To learn more, please refer to + * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, + * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and + * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors + * @returns + */ function throttlingRetryPolicy() { return { create: (nextPolicy, options) => { return new ThrottlingRetryPolicy(nextPolicy, options); - } + }, }; } -const StandardAbortMessage$1 = "The operation was aborted."; +const StandardAbortMessage = "The operation was aborted."; /** + * Creates a policy that re-sends the request if the response indicates the request failed because of throttling reasons. + * For example, if the response contains a `Retry-After` header, it will retry sending the request based on the value of that header. + * * To learn more, please refer to * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and @@ -23266,10 +23648,10 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { this.numberOfRetries += 1; await delay(delayInMs, undefined, { abortSignal: httpRequest.abortSignal, - abortErrorMsg: StandardAbortMessage$1 + abortErrorMsg: StandardAbortMessage, }); if ((_a = httpRequest.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { - throw new abortController.AbortError(StandardAbortMessage$1); + throw new abortController.AbortError(StandardAbortMessage); } if (this.numberOfRetries < DEFAULT_CLIENT_MAX_RETRY_COUNT) { return this.sendRequest(httpRequest); @@ -23303,77 +23685,26 @@ class ThrottlingRetryPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. 
-function signingPolicy(authenticationProvider) { - return { - create: (nextPolicy, options) => { - return new SigningPolicy(nextPolicy, options, authenticationProvider); - } - }; -} -class SigningPolicy extends BaseRequestPolicy { - constructor(nextPolicy, options, authenticationProvider) { - super(nextPolicy, options); - this.authenticationProvider = authenticationProvider; - } - signRequest(request) { - return this.authenticationProvider.signRequest(request); - } - sendRequest(request) { - return this.signRequest(request).then((nextRequest) => this._nextPolicy.sendRequest(nextRequest)); - } -} - -// Copyright (c) Microsoft Corporation. -const DefaultKeepAliveOptions = { - enable: true -}; -function keepAlivePolicy(keepAliveOptions) { - return { - create: (nextPolicy, options) => { - return new KeepAlivePolicy(nextPolicy, options, keepAliveOptions || DefaultKeepAliveOptions); - } - }; -} -/** - * KeepAlivePolicy is a policy used to control keep alive settings for every request. - */ -class KeepAlivePolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - * - * @param nextPolicy - - * @param options - - * @param keepAliveOptions - - */ - constructor(nextPolicy, options, keepAliveOptions) { - super(nextPolicy, options); - this.keepAliveOptions = keepAliveOptions; - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.keepAlive = this.keepAliveOptions.enable; - return this._nextPolicy.sendRequest(request); - } -} - // Copyright (c) Microsoft Corporation. const createSpan = coreTracing.createSpanFunction({ packagePrefix: "", - namespace: "" + namespace: "", }); +/** + * Creates a policy that wraps outgoing requests with a tracing span. + * @param tracingOptions - Tracing options. + * @returns An instance of the {@link TracingPolicy} class. + */ function tracingPolicy(tracingOptions = {}) { return { create(nextPolicy, options) { return new TracingPolicy(nextPolicy, options, tracingOptions); - } + }, }; } +/** + * A policy that wraps outgoing requests with a tracing span. + */ class TracingPolicy extends BaseRequestPolicy { constructor(nextPolicy, options, tracingOptions) { super(nextPolicy, options); @@ -23400,14 +23731,13 @@ class TracingPolicy extends BaseRequestPolicy { tryCreateSpan(request) { var _a; try { - const path = URLBuilder.parse(request.url).getPath() || "/"; // Passing spanOptions as part of tracingOptions to maintain compatibility @azure/core-tracing@preview.13 and earlier. // We can pass this as a separate parameter once we upgrade to the latest core-tracing. - const { span } = createSpan(path, { + const { span } = createSpan(`HTTP ${request.method}`, { tracingOptions: { spanOptions: Object.assign(Object.assign({}, request.spanOptions), { kind: coreTracing.SpanKind.CLIENT }), - tracingContext: request.tracingContext - } + tracingContext: request.tracingContext, + }, }); // If the span is not recording, don't do any more work. 
if (!span.isRecording()) { @@ -23421,7 +23751,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttributes({ "http.method": request.method, "http.url": request.url, - requestId: request.requestId + requestId: request.requestId, }); if (this.userAgent) { span.setAttribute("http.user_agent", this.userAgent); @@ -23448,7 +23778,7 @@ class TracingPolicy extends BaseRequestPolicy { try { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: err.message + message: err.message, }); if (err.statusCode) { span.setAttribute("http.status_code", err.statusCode); @@ -23467,7 +23797,7 @@ class TracingPolicy extends BaseRequestPolicy { span.setAttribute("serviceRequestId", serviceRequestId); } span.setStatus({ - code: coreTracing.SpanStatusCode.OK + code: coreTracing.SpanStatusCode.OK, }); span.end(); } @@ -23477,88 +23807,6 @@ class TracingPolicy extends BaseRequestPolicy { } } -// Copyright (c) Microsoft Corporation. -/** - * Returns a request policy factory that can be used to create an instance of - * {@link DisableResponseDecompressionPolicy}. - */ -function disableResponseDecompressionPolicy() { - return { - create: (nextPolicy, options) => { - return new DisableResponseDecompressionPolicy(nextPolicy, options); - } - }; -} -/** - * A policy to disable response decompression according to Accept-Encoding header - * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding - */ -class DisableResponseDecompressionPolicy extends BaseRequestPolicy { - /** - * Creates an instance of DisableResponseDecompressionPolicy. - * - * @param nextPolicy - - * @param options - - */ - // The parent constructor is protected. - /* eslint-disable-next-line @typescript-eslint/no-useless-constructor */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends out request. - * - * @param request - - * @returns - */ - async sendRequest(request) { - request.decompressResponse = false; - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -function ndJsonPolicy() { - return { - create: (nextPolicy, options) => { - return new NdJsonPolicy(nextPolicy, options); - } - }; -} -/** - * NdJsonPolicy that formats a JSON array as newline-delimited JSON - */ -class NdJsonPolicy extends BaseRequestPolicy { - /** - * Creates an instance of KeepAlivePolicy. - */ - constructor(nextPolicy, options) { - super(nextPolicy, options); - } - /** - * Sends a request. - */ - async sendRequest(request) { - // There currently isn't a good way to bypass the serializer - if (typeof request.body === "string" && request.body.startsWith("[")) { - const body = JSON.parse(request.body); - if (Array.isArray(body)) { - request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); - } - } - return this._nextPolicy.sendRequest(request); - } -} - -// Copyright (c) Microsoft Corporation. -let cachedHttpClient; -function getCachedDefaultHttpClient() { - if (!cachedHttpClient) { - cachedHttpClient = new NodeFetchHttpClient(); - } - return cachedHttpClient; -} - // Copyright (c) Microsoft Corporation. /** * ServiceClient sends service requests and receives responses. 
@@ -23608,7 +23856,7 @@ class ServiceClient { bearerTokenPolicyFactory = bearerTokenAuthenticationPolicy(credentials, credentialScopes); } return bearerTokenPolicyFactory.create(nextPolicy, createOptions); - } + }, }; }; authPolicyFactory = wrappedPolicyFactory(); @@ -23843,7 +24091,7 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op const updatedOptions = { rootName: (_c = serializerOptions.rootName) !== null && _c !== void 0 ? _c : "", includeRoot: (_d = serializerOptions.includeRoot) !== null && _d !== void 0 ? _d : false, - xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY + xmlCharKey: (_e = serializerOptions.xmlCharKey) !== null && _e !== void 0 ? _e : XML_CHARKEY, }; const xmlCharKey = serializerOptions.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { @@ -23862,13 +24110,13 @@ function serializeRequestBody(serviceClient, httpRequest, operationArguments, op if (typeName === MapperType.Sequence) { httpRequest.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } else if (!isStream) { httpRequest.body = stringifyXML(value, { rootName: xmlName || serializedName, - xmlCharKey + xmlCharKey, }); } } @@ -23952,6 +24200,12 @@ function createDefaultRequestPolicyFactories(authPolicyFactory, options) { factories.push(logPolicy({ logger: logger.info })); return factories; } +/** + * Creates an HTTP pipeline based on the given options. + * @param pipelineOptions - Defines options that are used to configure policies in the HTTP pipeline for an SDK client. + * @param authPolicyFactory - An optional authentication policy factory to use for signing requests. + * @returns A set of options that can be passed to create a new {@link ServiceClient}. + */ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { const requestPolicyFactories = []; if (pipelineOptions.sendStreamingJson) { @@ -23990,7 +24244,7 @@ function createPipelineFromOptions(pipelineOptions, authPolicyFactory) { } return { httpClient: pipelineOptions.httpClient, - requestPolicyFactories + requestPolicyFactories, }; } function getOperationArgumentValueFromParameter(serviceClient, operationArguments, parameter, serializer) { @@ -24066,12 +24320,18 @@ function getPropertyFromParameterPath(parent, parameterPath) { } return result; } +/** + * Parses an {@link HttpOperationResponse} into a normalized HTTP response object ({@link RestResponse}). + * @param _response - Wrapper object for http response. + * @param responseSpec - Mappers for how to parse the response properties. + * @returns - A normalized response object. + */ function flattenResponse(_response, responseSpec) { const parsedHeaders = _response.parsedHeaders; const bodyMapper = responseSpec && responseSpec.bodyMapper; const addOperationResponse = (obj) => { return Object.defineProperty(obj, "_response", { - value: _response + value: _response, }); }; if (bodyMapper) { @@ -24157,9 +24417,16 @@ class ExpiringAccessTokenCache { this.cachedToken = undefined; this.tokenRefreshBufferMs = tokenRefreshBufferMs; } + /** + * Saves an access token into the internal in-memory cache. + * @param accessToken - Access token or undefined to clear the cache. + */ setCachedToken(accessToken) { this.cachedToken = accessToken; } + /** + * Returns the cached access token, or `undefined` if one is not cached or the cached one is expiring soon. 
+ */ getCachedToken() { if (this.cachedToken && Date.now() + this.tokenRefreshBufferMs >= this.cachedToken.expiresOnTimestamp) { @@ -24218,6 +24485,9 @@ class AccessTokenRefresher { // Copyright (c) Microsoft Corporation. const HeaderConstants = Constants.HeaderConstants; const DEFAULT_AUTHORIZATION_SCHEME = "Basic"; +/** + * A simple {@link ServiceClientCredential} that authenticates with a username and a password. + */ class BasicAuthenticationCredentials { /** * Creates a new BasicAuthenticationCredentials object. @@ -24227,6 +24497,10 @@ class BasicAuthenticationCredentials { * @param authorizationScheme - The authorization scheme. */ constructor(userName, password, authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME) { + /** + * Authorization scheme. Defaults to "Basic". + * More information about authorization schemes is available here: https://developer.mozilla.org/docs/Web/HTTP/Authentication#authentication_schemes + */ this.authorizationScheme = DEFAULT_AUTHORIZATION_SCHEME; if (userName === null || userName === undefined || typeof userName.valueOf() !== "string") { throw new Error("userName cannot be null or undefined and must be of type string."); @@ -24306,6 +24580,9 @@ class ApiKeyCredentials { } // Copyright (c) Microsoft Corporation. +/** + * A {@link TopicCredentials} object used for Azure Event Grid. + */ class TopicCredentials extends ApiKeyCredentials { /** * Creates a new EventGrid TopicCredentials object. @@ -24318,8 +24595,8 @@ class TopicCredentials extends ApiKeyCredentials { } const options = { inHeader: { - "aeg-sas-key": topicKey - } + "aeg-sas-key": topicKey, + }, }; super(options); } @@ -24327,9 +24604,7 @@ class TopicCredentials extends ApiKeyCredentials { Object.defineProperty(exports, 'isTokenCredential', { enumerable: true, - get: function () { - return coreAuth.isTokenCredential; - } + get: function () { return coreAuth.isTokenCredential; } }); exports.AccessTokenRefresher = AccessTokenRefresher; exports.ApiKeyCredentials = ApiKeyCredentials; @@ -24971,12 +25246,7 @@ var SamplingDecision; /***/ }), /* 341 */, /* 342 */, -/* 343 */ -/***/ (function(module) { - -module.exports = require("timers"); - -/***/ }), +/* 343 */, /* 344 */ /***/ (function(__unusedmodule, exports) { @@ -25553,7 +25823,7 @@ var DiagLogLevel; "use strict"; -var punycode = __webpack_require__(213); +var punycode = __webpack_require__(815); var mappingTable = __webpack_require__(482); var PROCESSING_OPTIONS = { @@ -38633,7 +38903,7 @@ exports.LibericaDistributions = LibericaDistributions; Object.defineProperty(exports, '__esModule', { value: true }); -__webpack_require__(71); +__webpack_require__(48); var tslib = __webpack_require__(671); // Copyright (c) Microsoft Corporation. @@ -38656,7 +38926,7 @@ function getPagedAsyncIterator(pagedResult) { }, byPage: (_a = pagedResult === null || pagedResult === void 0 ? void 0 : pagedResult.byPage) !== null && _a !== void 0 ? _a : ((settings) => { return getPageAsyncIterator(pagedResult, settings === null || settings === void 0 ? void 0 : settings.maxPageSize); - }) + }), }; } function getItemAsyncIterator(pagedResult, maxPageSize) { @@ -47106,7 +47376,318 @@ Object.defineProperty(exports, "__esModule", { value: true }); //# sourceMappingURL=span_context.js.map /***/ }), -/* 608 */, +/* 608 */ +/***/ (function(module) { + +/*! ***************************************************************************** +Copyright (c) Microsoft Corporation. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. +***************************************************************************** */ +/* global global, define, System, Reflect, Promise */ +var __extends; +var __assign; +var __rest; +var __decorate; +var __param; +var __metadata; +var __awaiter; +var __generator; +var __exportStar; +var __values; +var __read; +var __spread; +var __spreadArrays; +var __spreadArray; +var __await; +var __asyncGenerator; +var __asyncDelegator; +var __asyncValues; +var __makeTemplateObject; +var __importStar; +var __importDefault; +var __classPrivateFieldGet; +var __classPrivateFieldSet; +var __createBinding; +(function (factory) { + var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; + if (typeof define === "function" && define.amd) { + define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); + } + else if ( true && typeof module.exports === "object") { + factory(createExporter(root, createExporter(module.exports))); + } + else { + factory(createExporter(root)); + } + function createExporter(exports, previous) { + if (exports !== root) { + if (typeof Object.create === "function") { + Object.defineProperty(exports, "__esModule", { value: true }); + } + else { + exports.__esModule = true; + } + } + return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; + } +}) +(function (exporter) { + var extendStatics = Object.setPrototypeOf || + ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || + function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; + + __extends = function (d, b) { + if (typeof b !== "function" && b !== null) + throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); + extendStatics(d, b); + function __() { this.constructor = d; } + d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); + }; + + __assign = Object.assign || function (t) { + for (var s, i = 1, n = arguments.length; i < n; i++) { + s = arguments[i]; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; + } + return t; + }; + + __rest = function (s, e) { + var t = {}; + for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) + t[p] = s[p]; + if (s != null && typeof Object.getOwnPropertySymbols === "function") + for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { + if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) + t[p[i]] = s[p[i]]; + } + return t; + }; + + __decorate = function (decorators, target, key, desc) { + var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; + if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); + else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; + return c > 3 && r && Object.defineProperty(target, key, r), r; + }; + + __param = function (paramIndex, decorator) { + return function (target, key) { decorator(target, key, paramIndex); } + }; + + __metadata = function (metadataKey, metadataValue) { + if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); + }; + + __awaiter = function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); + }; + + __generator = function (thisArg, body) { + var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; + return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; + function verb(n) { return function (v) { return step([n, v]); }; } + function step(op) { + if (f) throw new TypeError("Generator is already executing."); + while (_) try { + if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; + if (y = 0, t) op = [op[0] & 2, t.value]; + switch (op[0]) { + case 0: case 1: t = op; break; + case 4: _.label++; return { value: op[1], done: false }; + case 5: _.label++; y = op[1]; op = [0]; continue; + case 7: op = _.ops.pop(); _.trys.pop(); continue; + default: + if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } + if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } + if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } + if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } + if (t[2]) _.ops.pop(); + _.trys.pop(); continue; + } + op = body.call(thisArg, _); + } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } + if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; + } + }; + + __exportStar = function(m, o) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); + }; + + __createBinding = Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + }) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; + }); + + __values = function (o) { + var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; + if (m) return m.call(o); + if (o && typeof o.length === "number") return { + next: function () { + if (o && i >= o.length) o = void 0; + return { value: o && o[i++], done: !o }; + } + }; + throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); + }; + + __read = function (o, n) { + var m = typeof Symbol === "function" && o[Symbol.iterator]; + if (!m) return o; + var i = m.call(o), r, ar = [], e; + try { + while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); + } + catch (error) { e = { error: error }; } + finally { + try { + if (r && !r.done && (m = i["return"])) m.call(i); + } + finally { if (e) throw e.error; } + } + return ar; + }; + + /** @deprecated */ + __spread = function () { + for (var ar = [], i = 0; i < arguments.length; i++) + ar = ar.concat(__read(arguments[i])); + return ar; + }; + + /** @deprecated */ + __spreadArrays = function () { + for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; + for (var r = Array(s), k = 0, i = 0; i < il; i++) + for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) + r[k] = a[j]; + return r; + }; + + __spreadArray = function (to, from, pack) { + if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { + if (ar || !(i in from)) { + if (!ar) ar = Array.prototype.slice.call(from, 0, i); + ar[i] = from[i]; + } + } + return to.concat(ar || Array.prototype.slice.call(from)); + }; + + __await = function (v) { + return this instanceof __await ? (this.v = v, this) : new __await(v); + }; + + __asyncGenerator = function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } + }; + + __asyncDelegator = function (o) { + var i, p; + return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; + function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } + }; + + __asyncValues = function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } + }; + + __makeTemplateObject = function (cooked, raw) { + if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } + return cooked; + }; + + var __setModuleDefault = Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); + }) : function(o, v) { + o["default"] = v; + }; + + __importStar = function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; + }; + + __importDefault = function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; + }; + + __classPrivateFieldGet = function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); + }; + + __classPrivateFieldSet = function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; + }; + + exporter("__extends", __extends); + exporter("__assign", __assign); + exporter("__rest", __rest); + exporter("__decorate", __decorate); + exporter("__param", __param); + exporter("__metadata", __metadata); + exporter("__awaiter", __awaiter); + exporter("__generator", __generator); + exporter("__exportStar", __exportStar); + exporter("__createBinding", __createBinding); + exporter("__values", __values); + exporter("__read", __read); + exporter("__spread", __spread); + exporter("__spreadArrays", __spreadArrays); + exporter("__spreadArray", __spreadArray); + exporter("__await", __await); + exporter("__asyncGenerator", __asyncGenerator); + exporter("__asyncDelegator", __asyncDelegator); + exporter("__asyncValues", __asyncValues); + exporter("__makeTemplateObject", __makeTemplateObject); + exporter("__importStar", __importStar); + exporter("__importDefault", __importDefault); + exporter("__classPrivateFieldGet", __classPrivateFieldGet); + exporter("__classPrivateFieldSet", __classPrivateFieldSet); +}); + + +/***/ }), /* 609 */, /* 610 */, /* 611 */ @@ -47920,7 +48501,7 @@ module.exports = require("net"); -var Punycode = __webpack_require__(213); +var Punycode = __webpack_require__(815); var internals = {}; @@ -54056,18 +54637,12 @@ exports.restoreCache = restoreCache; * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options) { + var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); - core.debug('Reserving Cache'); - const cacheId = yield cacheHttpClient.reserveCache(key, paths, { - compressionMethod - }); - if (cacheId === -1) { - throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); - } - core.debug(`Cache ID: ${cacheId}`); + let cacheId = null; const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); @@ -54082,9 +54657,24 @@ function saveCache(paths, key, options) { const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); - if (archiveFileSize > fileSizeLimit) { + // For GHES, this check will take place in ReserveCache API with enterprise file size limit + if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } + core.debug('Reserving Cache'); + const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { + compressionMethod, + cacheSize: archiveFileSize + }); + if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { + cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; + } + else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { + throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? 
void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); + } + else { + throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); + } core.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, options); } @@ -62841,313 +63431,7 @@ exports.asciiSerializationOfAnOrigin = asciiSerializationOfAnOrigin; /* 815 */ /***/ (function(module) { -/*! ***************************************************************************** -Copyright (c) Microsoft Corporation. - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH -REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY -AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, -INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM -LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR -OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR -PERFORMANCE OF THIS SOFTWARE. -***************************************************************************** */ -/* global global, define, System, Reflect, Promise */ -var __extends; -var __assign; -var __rest; -var __decorate; -var __param; -var __metadata; -var __awaiter; -var __generator; -var __exportStar; -var __values; -var __read; -var __spread; -var __spreadArrays; -var __spreadArray; -var __await; -var __asyncGenerator; -var __asyncDelegator; -var __asyncValues; -var __makeTemplateObject; -var __importStar; -var __importDefault; -var __classPrivateFieldGet; -var __classPrivateFieldSet; -var __createBinding; -(function (factory) { - var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? this : {}; - if (typeof define === "function" && define.amd) { - define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); - } - else if ( true && typeof module.exports === "object") { - factory(createExporter(root, createExporter(module.exports))); - } - else { - factory(createExporter(root)); - } - function createExporter(exports, previous) { - if (exports !== root) { - if (typeof Object.create === "function") { - Object.defineProperty(exports, "__esModule", { value: true }); - } - else { - exports.__esModule = true; - } - } - return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; - } -}) -(function (exporter) { - var extendStatics = Object.setPrototypeOf || - ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || - function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; - - __extends = function (d, b) { - if (typeof b !== "function" && b !== null) - throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); - extendStatics(d, b); - function __() { this.constructor = d; } - d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); - }; - - __assign = Object.assign || function (t) { - for (var s, i = 1, n = arguments.length; i < n; i++) { - s = arguments[i]; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; - } - return t; - }; - - __rest = function (s, e) { - var t = {}; - for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) - t[p] = s[p]; - if (s != null && typeof Object.getOwnPropertySymbols === "function") - for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { - if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) - t[p[i]] = s[p[i]]; - } - return t; - }; - - __decorate = function (decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); - else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - - __param = function (paramIndex, decorator) { - return function (target, key) { decorator(target, key, paramIndex); } - }; - - __metadata = function (metadataKey, metadataValue) { - if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); - }; - - __awaiter = function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - - __generator = function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } - }; - - __exportStar = function(m, o) { - for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); - }; - - __createBinding = Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); - }) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; - }); - - __values = function (o) { - var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; - if (m) return m.call(o); - if (o && typeof o.length === "number") return { - next: function () { - if (o && i >= o.length) o = void 0; - return { value: o && o[i++], done: !o }; - } - }; - throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); - }; - - __read = function (o, n) { - var m = typeof Symbol === "function" && o[Symbol.iterator]; - if (!m) return o; - var i = m.call(o), r, ar = [], e; - try { - while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); - } - catch (error) { e = { error: error }; } - finally { - try { - if (r && !r.done && (m = i["return"])) m.call(i); - } - finally { if (e) throw e.error; } - } - return ar; - }; - - /** @deprecated */ - __spread = function () { - for (var ar = [], i = 0; i < arguments.length; i++) - ar = ar.concat(__read(arguments[i])); - return ar; - }; - - /** @deprecated */ - __spreadArrays = function () { - for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; - for (var r = Array(s), k = 0, i = 0; i < il; i++) - for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) - r[k] = a[j]; - return r; - }; - - __spreadArray = function (to, from, pack) { - if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { - if (ar || !(i in from)) { - if (!ar) ar = Array.prototype.slice.call(from, 0, i); - ar[i] = from[i]; - } - } - return to.concat(ar || Array.prototype.slice.call(from)); - }; - - __await = function (v) { - return this instanceof __await ? 
(this.v = v, this) : new __await(v); - }; - - __asyncGenerator = function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } - }; - - __asyncDelegator = function (o) { - var i, p; - return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; - function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: n === "return" } : f ? f(v) : v; } : f; } - }; - - __asyncValues = function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } - }; - - __makeTemplateObject = function (cooked, raw) { - if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } - return cooked; - }; - - var __setModuleDefault = Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); - }) : function(o, v) { - o["default"] = v; - }; - - __importStar = function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; - }; - - __importDefault = function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; - }; - - __classPrivateFieldGet = function (receiver, state, kind, f) { - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); - if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); - return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); - }; - - __classPrivateFieldSet = function (receiver, state, value, kind, f) { - if (kind === "m") throw new TypeError("Private method is not writable"); - if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); - if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); - return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; - }; - - exporter("__extends", __extends); - exporter("__assign", __assign); - exporter("__rest", __rest); - exporter("__decorate", __decorate); - exporter("__param", __param); - exporter("__metadata", __metadata); - exporter("__awaiter", __awaiter); - exporter("__generator", __generator); - exporter("__exportStar", __exportStar); - exporter("__createBinding", __createBinding); - exporter("__values", __values); - exporter("__read", __read); - exporter("__spread", __spread); - exporter("__spreadArrays", __spreadArrays); - exporter("__spreadArray", __spreadArray); - exporter("__await", __await); - exporter("__asyncGenerator", __asyncGenerator); - exporter("__asyncDelegator", __asyncDelegator); - exporter("__asyncValues", __asyncValues); - exporter("__makeTemplateObject", __makeTemplateObject); - exporter("__importStar", __importStar); - exporter("__importDefault", __importDefault); - exporter("__classPrivateFieldGet", __classPrivateFieldGet); - exporter("__classPrivateFieldSet", __classPrivateFieldSet); -}); - +module.exports = require("punycode"); /***/ }), /* 816 */, @@ -63580,7 +63864,7 @@ module.exports = diff Object.defineProperty(exports, '__esModule', { value: true }); var coreHttp = __webpack_require__(330); -var tslib = __webpack_require__(815); +var tslib = __webpack_require__(608); var coreTracing = __webpack_require__(72); var logger$1 = __webpack_require__(492); var abortController = __webpack_require__(819); @@ -63593,6 +63877,29 @@ var events = __webpack_require__(614); var fs = __webpack_require__(747); var util = __webpack_require__(669); +function _interopNamespace(e) { + if (e && e.__esModule) return e; + var n = Object.create(null); + if (e) { + Object.keys(e).forEach(function (k) { + if (k !== 'default') { + var d = Object.getOwnPropertyDescriptor(e, k); + Object.defineProperty(n, k, d.get ? d : { + enumerable: true, + get: function () { return e[k]; } + }); + } + }); + } + n["default"] = e; + return Object.freeze(n); +} + +var coreHttp__namespace = /*#__PURE__*/_interopNamespace(coreHttp); +var os__namespace = /*#__PURE__*/_interopNamespace(os); +var fs__namespace = /*#__PURE__*/_interopNamespace(fs); +var util__namespace = /*#__PURE__*/_interopNamespace(util); + /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. 
@@ -64547,10 +64854,10 @@ const BlobItemInternal = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } }, deleted: { @@ -64625,6 +64932,30 @@ const BlobItemInternal = { } } }; +const BlobName = { + serializedName: "BlobName", + type: { + name: "Composite", + className: "BlobName", + modelProperties: { + encoded: { + serializedName: "Encoded", + xmlName: "Encoded", + xmlIsAttribute: true, + type: { + name: "Boolean" + } + }, + content: { + serializedName: "content", + xmlName: "content", + type: { + name: "String" + } + } + } + } +}; const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", @@ -65068,10 +65399,10 @@ const BlobPrefix = { modelProperties: { name: { serializedName: "Name", - required: true, xmlName: "Name", type: { - name: "String" + name: "Composite", + className: "BlobName" } } } @@ -66694,6 +67025,59 @@ const ContainerSubmitBatchExceptionHeaders = { } } }; +const ContainerFilterBlobsHeaders = { + serializedName: "Container_filterBlobsHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsHeaders", + modelProperties: { + clientRequestId: { + serializedName: "x-ms-client-request-id", + xmlName: "x-ms-client-request-id", + type: { + name: "String" + } + }, + requestId: { + serializedName: "x-ms-request-id", + xmlName: "x-ms-request-id", + type: { + name: "String" + } + }, + version: { + serializedName: "x-ms-version", + xmlName: "x-ms-version", + type: { + name: "String" + } + }, + date: { + serializedName: "date", + xmlName: "date", + type: { + name: "DateTimeRfc1123" + } + } + } + } +}; +const ContainerFilterBlobsExceptionHeaders = { + serializedName: "Container_filterBlobsExceptionHeaders", + type: { + name: "Composite", + className: "ContainerFilterBlobsExceptionHeaders", + modelProperties: { + errorCode: { + serializedName: "x-ms-error-code", + xmlName: "x-ms-error-code", + type: { + name: "String" + } + } + } + } +}; const ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { @@ -69202,6 +69586,13 @@ const BlobCopyFromURLHeaders = { name: "ByteArray" } }, + encryptionScope: { + serializedName: "x-ms-encryption-scope", + xmlName: "x-ms-encryption-scope", + type: { + name: "String" + } + }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", @@ -71719,6 +72110,7 @@ var Mappers = /*#__PURE__*/Object.freeze({ ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, BlobFlatListSegment: BlobFlatListSegment, BlobItemInternal: BlobItemInternal, + BlobName: BlobName, BlobPropertiesInternal: BlobPropertiesInternal, ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, BlobHierarchyListSegment: BlobHierarchyListSegment, @@ -71770,6 +72162,8 @@ var Mappers = /*#__PURE__*/Object.freeze({ ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, + ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, + ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, @@ -71957,7 +72351,7 @@ const timeoutInSeconds = { const version = { parameterPath: "version", mapper: { - defaultValue: 
"2020-10-02", + defaultValue: "2021-04-10", isConstant: true, serializedName: "x-ms-version", type: { @@ -72052,7 +72446,7 @@ const include = { element: { type: { name: "Enum", - allowedValues: ["metadata", "deleted"] + allowedValues: ["metadata", "deleted", "system"] } } } @@ -72574,11 +72968,10 @@ const encryptionKeySha256 = { } }; const encryptionAlgorithm = { - parameterPath: ["options", "encryptionAlgorithm"], + parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], mapper: { - defaultValue: "AES256", - isConstant: true, serializedName: "x-ms-encryption-algorithm", + xmlName: "x-ms-encryption-algorithm", type: { name: "String" } @@ -73495,7 +73888,7 @@ class Service { setProperties(blobServiceProperties, options) { const operationArguments = { blobServiceProperties, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setPropertiesOperationSpec); } @@ -73506,9 +73899,9 @@ class Service { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. It is only available on the @@ -73518,7 +73911,7 @@ class Service { */ getStatistics(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getStatisticsOperationSpec); } @@ -73528,7 +73921,7 @@ class Service { */ listContainersSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listContainersSegmentOperationSpec); } @@ -73541,7 +73934,7 @@ class Service { getUserDelegationKey(keyInfo, options) { const operationArguments = { keyInfo, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getUserDelegationKeyOperationSpec); } @@ -73551,9 +73944,9 @@ class Service { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. 
@@ -73568,9 +73961,9 @@ class Service { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a @@ -73580,13 +73973,13 @@ class Service { */ filterBlobs(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec$1); } } // Operation Specifications -const xmlSerializer = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$5 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", @@ -73615,9 +74008,9 @@ const setPropertiesOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getPropertiesOperationSpec = { +const getPropertiesOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -73642,7 +74035,7 @@ const getPropertiesOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getStatisticsOperationSpec = { path: "/", @@ -73669,7 +74062,7 @@ const getStatisticsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const listContainersSegmentOperationSpec = { path: "/", @@ -73699,7 +74092,7 @@ const listContainersSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; const getUserDelegationKeyOperationSpec = { path: "/", @@ -73730,9 +74123,9 @@ const getUserDelegationKeyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const getAccountInfoOperationSpec = { +const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { @@ -73748,9 +74141,9 @@ const getAccountInfoOperationSpec = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const submitBatchOperationSpec = { +const submitBatchOperationSpec$1 = { path: "/", httpMethod: "POST", responses: { @@ -73780,9 +74173,9 @@ const submitBatchOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer + serializer: xmlSerializer$5 }; -const filterBlobsOperationSpec = { +const filterBlobsOperationSpec$1 = { path: "/", httpMethod: "GET", responses: { @@ -73809,7 +74202,7 @@ const filterBlobsOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer + serializer: xmlSerializer$5 }; /* @@ -73835,9 +74228,9 @@ class Container { */ create(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return 
this.client.sendOperationRequest(operationArguments, createOperationSpec); + return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data @@ -73846,7 +74239,7 @@ class Container { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$1); } @@ -73857,9 +74250,9 @@ class Container { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. @@ -73867,9 +74260,9 @@ class Container { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. The permissions indicate whether container data @@ -73878,7 +74271,7 @@ class Container { */ getAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccessPolicyOperationSpec); } @@ -73889,7 +74282,7 @@ class Container { */ setAccessPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setAccessPolicyOperationSpec); } @@ -73899,7 +74292,7 @@ class Container { */ restore(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, restoreOperationSpec); } @@ -73911,7 +74304,7 @@ class Container { rename(sourceContainerName, options) { const operationArguments = { sourceContainerName, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, renameOperationSpec); } @@ -73928,9 +74321,20 @@ class Container { contentLength, multipartContentType, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, submitBatchOperationSpec); + } + /** + * The Filter Blobs operation enables 
callers to list blobs in a container whose tags match a given + * search expression. Filter blobs searches within the given container. + * @param options The options parameters. + */ + filterBlobs(options) { + const operationArguments = { + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) + }; + return this.client.sendOperationRequest(operationArguments, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -73939,9 +74343,9 @@ class Container { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -73952,9 +74356,9 @@ class Container { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -73965,9 +74369,9 @@ class Container { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can @@ -73976,9 +74380,9 @@ class Container { */ breakLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. 
The lock duration can @@ -73993,9 +74397,9 @@ class Container { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container @@ -74003,7 +74407,7 @@ class Container { */ listBlobFlatSegment(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobFlatSegmentOperationSpec); } @@ -74018,7 +74422,7 @@ class Container { listBlobHierarchySegment(delimiter, options) { const operationArguments = { delimiter, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, listBlobHierarchySegmentOperationSpec); } @@ -74028,14 +74432,14 @@ class Container { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$1); } } // Operation Specifications -const xmlSerializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const createOperationSpec = { +const xmlSerializer$4 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74059,7 +74463,7 @@ const createOperationSpec = { preventEncryptionScopeOverride ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", @@ -74082,9 +74486,9 @@ const getPropertiesOperationSpec$1 = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const deleteOperationSpec = { +const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: "DELETE", responses: { @@ -74107,9 +74511,9 @@ const deleteOperationSpec = { ifUnmodifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const setMetadataOperationSpec = { +const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74136,7 +74540,7 @@ const setMetadataOperationSpec = { ifModifiedSince ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccessPolicyOperationSpec = { path: "/{containerName}", @@ -74175,7 +74579,7 @@ const getAccessPolicyOperationSpec = { leaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const setAccessPolicyOperationSpec = { path: "/{containerName}", @@ -74209,7 +74613,7 @@ const setAccessPolicyOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const restoreOperationSpec = { path: "/{containerName}", @@ -74237,7 +74641,7 @@ const restoreOperationSpec = { deletedContainerVersion ], isXML: true, 
- serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const renameOperationSpec = { path: "/{containerName}", @@ -74265,9 +74669,9 @@ const renameOperationSpec = { sourceLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const submitBatchOperationSpec$1 = { +const submitBatchOperationSpec = { path: "/{containerName}", httpMethod: "POST", responses: { @@ -74301,9 +74705,39 @@ const submitBatchOperationSpec$1 = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const acquireLeaseOperationSpec = { +const filterBlobsOperationSpec = { + path: "/{containerName}", + httpMethod: "GET", + responses: { + 200: { + bodyMapper: FilterBlobSegment, + headersMapper: ContainerFilterBlobsHeaders + }, + default: { + bodyMapper: StorageError, + headersMapper: ContainerFilterBlobsExceptionHeaders + } + }, + queryParameters: [ + timeoutInSeconds, + marker, + maxPageSize, + comp5, + where, + restype2 + ], + urlParameters: [url], + headerParameters: [ + version, + requestId, + accept1 + ], + isXML: true, + serializer: xmlSerializer$4 +}; +const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74332,9 +74766,9 @@ const acquireLeaseOperationSpec = { proposedLeaseId ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const releaseLeaseOperationSpec = { +const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74362,9 +74796,9 @@ const releaseLeaseOperationSpec = { leaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const renewLeaseOperationSpec = { +const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74392,9 +74826,9 @@ const renewLeaseOperationSpec = { action2 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const breakLeaseOperationSpec = { +const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74422,9 +74856,9 @@ const breakLeaseOperationSpec = { breakPeriod ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; -const changeLeaseOperationSpec = { +const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { @@ -74453,7 +74887,7 @@ const changeLeaseOperationSpec = { proposedLeaseId1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", @@ -74484,7 +74918,7 @@ const listBlobFlatSegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", @@ -74516,7 +74950,7 @@ const listBlobHierarchySegmentOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", @@ -74534,7 +74968,7 @@ const getAccountInfoOperationSpec$1 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$1 + serializer: xmlSerializer$4 }; /* @@ -74560,7 +74994,7 @@ class Blob$1 { */ download(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return 
this.client.sendOperationRequest(operationArguments, downloadOperationSpec); } @@ -74571,9 +75005,9 @@ class Blob$1 { */ getProperties(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is @@ -74592,9 +75026,9 @@ class Blob$1 { */ delete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, deleteOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted @@ -74602,7 +75036,7 @@ class Blob$1 { */ undelete(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, undeleteOperationSpec); } @@ -74614,7 +75048,7 @@ class Blob$1 { setExpiry(expiryOptions, options) { const operationArguments = { expiryOptions, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setExpiryOperationSpec); } @@ -74624,7 +75058,7 @@ class Blob$1 { */ setHttpHeaders(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setHttpHeadersOperationSpec); } @@ -74634,7 +75068,7 @@ class Blob$1 { */ setImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setImmutabilityPolicyOperationSpec); } @@ -74644,7 +75078,7 @@ class Blob$1 { */ deleteImmutabilityPolicy(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, deleteImmutabilityPolicyOperationSpec); } @@ -74656,7 +75090,7 @@ class Blob$1 { setLegalHold(legalHold, options) { const operationArguments = { legalHold, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setLegalHoldOperationSpec); } @@ -74667,9 +75101,9 @@ class Blob$1 { */ setMetadata(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, 
setMetadataOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -74678,9 +75112,9 @@ class Blob$1 { */ acquireLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -74691,9 +75125,9 @@ class Blob$1 { releaseLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -74704,9 +75138,9 @@ class Blob$1 { renewLease(leaseId, options) { const operationArguments = { leaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -74721,9 +75155,9 @@ class Blob$1 { const operationArguments = { leaseId, proposedLeaseId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete @@ -74732,9 +75166,9 @@ class Blob$1 { */ breakLease(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec$1); + return this.client.sendOperationRequest(operationArguments, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob @@ -74742,7 +75176,7 @@ class Blob$1 { */ createSnapshot(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createSnapshotOperationSpec); } @@ -74757,7 +75191,7 @@ class Blob$1 { startCopyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return 
this.client.sendOperationRequest(operationArguments, startCopyFromURLOperationSpec); } @@ -74773,7 +75207,7 @@ class Blob$1 { copyFromURL(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyFromURLOperationSpec); } @@ -74787,7 +75221,7 @@ class Blob$1 { abortCopyFromURL(copyId, options) { const operationArguments = { copyId, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, abortCopyFromURLOperationSpec); } @@ -74803,7 +75237,7 @@ class Blob$1 { setTier(tier, options) { const operationArguments = { tier, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTierOperationSpec); } @@ -74813,9 +75247,9 @@ class Blob$1 { */ getAccountInfo(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query @@ -74824,7 +75258,7 @@ class Blob$1 { */ query(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, queryOperationSpec); } @@ -74834,7 +75268,7 @@ class Blob$1 { */ getTags(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getTagsOperationSpec); } @@ -74844,13 +75278,13 @@ class Blob$1 { */ setTags(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, setTagsOperationSpec); } } // Operation Specifications -const xmlSerializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ true); +const xmlSerializer$3 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", @@ -74898,9 +75332,9 @@ const downloadOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getPropertiesOperationSpec$2 = { +const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { @@ -74933,9 +75367,9 @@ const getPropertiesOperationSpec$2 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const deleteOperationSpec$1 = { +const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { @@ -74967,7 
+75401,7 @@ const deleteOperationSpec$1 = { deleteSnapshots ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", @@ -74989,7 +75423,7 @@ const undeleteOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", @@ -75013,7 +75447,7 @@ const setExpiryOperationSpec = { expiresOn ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", @@ -75047,7 +75481,7 @@ const setHttpHeadersOperationSpec = { blobContentDisposition ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -75072,7 +75506,7 @@ const setImmutabilityPolicyOperationSpec = { immutabilityPolicyMode ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", @@ -75094,7 +75528,7 @@ const deleteImmutabilityPolicyOperationSpec = { accept1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", @@ -75117,9 +75551,9 @@ const setLegalHoldOperationSpec = { legalHold ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const setMetadataOperationSpec$1 = { +const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75150,9 +75584,9 @@ const setMetadataOperationSpec$1 = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const acquireLeaseOperationSpec$1 = { +const acquireLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75180,9 +75614,9 @@ const acquireLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const releaseLeaseOperationSpec$1 = { +const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75209,9 +75643,9 @@ const releaseLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const renewLeaseOperationSpec$1 = { +const renewLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75238,9 +75672,9 @@ const renewLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const changeLeaseOperationSpec$1 = { +const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75268,9 +75702,9 @@ const changeLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const breakLeaseOperationSpec$1 = { +const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -75297,7 +75731,7 @@ const breakLeaseOperationSpec$1 = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", @@ -75330,7 +75764,7 @@ const createSnapshotOperationSpec = { encryptionScope ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -75372,7 +75806,7 @@ 
const startCopyFromURLOperationSpec = { legalHold1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -75401,6 +75835,7 @@ const copyFromURLOperationSpec = { ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, + encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, @@ -75414,7 +75849,7 @@ const copyFromURLOperationSpec = { copySourceAuthorization ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -75442,7 +75877,7 @@ const abortCopyFromURLOperationSpec = { copyActionAbortConstant ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTierOperationSpec = { path: "/{containerName}/{blob}", @@ -75476,9 +75911,9 @@ const setTierOperationSpec = { tier1 ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; -const getAccountInfoOperationSpec$2 = { +const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { @@ -75494,7 +75929,7 @@ const getAccountInfoOperationSpec$2 = { urlParameters: [url], headerParameters: [version, accept1], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const queryOperationSpec = { path: "/{containerName}/{blob}", @@ -75544,7 +75979,7 @@ const queryOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -75574,7 +76009,7 @@ const getTagsOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", @@ -75608,7 +76043,7 @@ const setTagsOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$2 + serializer: xmlSerializer$3 }; /* @@ -75638,7 +76073,7 @@ class PageBlob { const operationArguments = { contentLength, blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, createOperationSpec$1); } @@ -75652,7 +76087,7 @@ class PageBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesOperationSpec); } @@ -75664,7 +76099,7 @@ class PageBlob { clearPages(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, clearPagesOperationSpec); } @@ -75685,7 +76120,7 @@ class PageBlob { sourceRange, contentLength, range, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, uploadPagesFromURLOperationSpec); } @@ -75696,7 +76131,7 @@ class PageBlob { */ getPageRanges(options) { const operationArguments = { - 
options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesOperationSpec); } @@ -75707,7 +76142,7 @@ class PageBlob { */ getPageRangesDiff(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getPageRangesDiffOperationSpec); } @@ -75720,7 +76155,7 @@ class PageBlob { resize(blobContentLength, options) { const operationArguments = { blobContentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, resizeOperationSpec); } @@ -75734,7 +76169,7 @@ class PageBlob { updateSequenceNumber(sequenceNumberAction, options) { const operationArguments = { sequenceNumberAction, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, updateSequenceNumberOperationSpec); } @@ -75753,14 +76188,14 @@ class PageBlob { copyIncremental(copySource, options) { const operationArguments = { copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, copyIncrementalOperationSpec); } } // Operation Specifications -const xmlSerializer$3 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$2 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -75807,7 +76242,7 @@ const createOperationSpec$1 = { blobSequenceNumber ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -75849,7 +76284,7 @@ const uploadPagesOperationSpec = { ifSequenceNumberEqualTo ], mediaType: "binary", - serializer + serializer: serializer$2 }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", @@ -75887,7 +76322,7 @@ const clearPagesOperationSpec = { pageWrite1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -75934,7 +76369,7 @@ const uploadPagesFromURLOperationSpec = { range1 ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", @@ -75968,7 +76403,7 @@ const getPageRangesOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", @@ -76004,7 +76439,7 @@ const getPageRangesDiffOperationSpec = { prevSnapshotUrl ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const resizeOperationSpec = { path: "/{containerName}/{blob}", @@ -76037,7 +76472,7 @@ const 
resizeOperationSpec = { blobContentLength ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", @@ -76067,7 +76502,7 @@ const updateSequenceNumberOperationSpec = { sequenceNumberAction ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", @@ -76095,7 +76530,7 @@ const copyIncrementalOperationSpec = { copySource ], isXML: true, - serializer: xmlSerializer$3 + serializer: xmlSerializer$2 }; /* @@ -76122,9 +76557,9 @@ class AppendBlob { create(contentLength, options) { const operationArguments = { contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; - return this.client.sendOperationRequest(operationArguments, createOperationSpec$2); + return this.client.sendOperationRequest(operationArguments, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. The @@ -76138,7 +76573,7 @@ class AppendBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockOperationSpec); } @@ -76155,7 +76590,7 @@ class AppendBlob { const operationArguments = { sourceUrl, contentLength, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, appendBlockFromUrlOperationSpec); } @@ -76166,15 +76601,15 @@ class AppendBlob { */ seal(options) { const operationArguments = { - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, sealOperationSpec); } } // Operation Specifications -const xmlSerializer$4 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$1 = new coreHttp.Serializer(Mappers, /* isXml */ false); -const createOperationSpec$2 = { +const xmlSerializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer$1 = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); +const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { @@ -76217,7 +76652,7 @@ const createOperationSpec$2 = { blobType1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -76301,7 +76736,7 @@ const appendBlockFromUrlOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; const sealOperationSpec = { path: "/{containerName}/{blob}", @@ -76329,7 +76764,7 @@ const sealOperationSpec = { appendPosition ], isXML: true, - serializer: xmlSerializer$4 + serializer: xmlSerializer$1 }; /* @@ -76361,7 +76796,7 @@ class BlockBlob { const operationArguments = { contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, 
uploadOperationSpec); } @@ -76382,7 +76817,7 @@ class BlockBlob { const operationArguments = { contentLength, copySource, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, putBlobFromUrlOperationSpec); } @@ -76400,7 +76835,7 @@ class BlockBlob { blockId, contentLength, body, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockOperationSpec); } @@ -76419,7 +76854,7 @@ class BlockBlob { blockId, contentLength, sourceUrl, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, stageBlockFromURLOperationSpec); } @@ -76437,7 +76872,7 @@ class BlockBlob { commitBlockList(blocks, options) { const operationArguments = { blocks, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, commitBlockListOperationSpec); } @@ -76451,14 +76886,14 @@ class BlockBlob { getBlockList(listType, options) { const operationArguments = { listType, - options: coreHttp.operationOptionsToRequestOptionsBase(options || {}) + options: coreHttp__namespace.operationOptionsToRequestOptionsBase(options || {}) }; return this.client.sendOperationRequest(operationArguments, getBlockListOperationSpec); } } // Operation Specifications -const xmlSerializer$5 = new coreHttp.Serializer(Mappers, /* isXml */ true); -const serializer$2 = new coreHttp.Serializer(Mappers, /* isXml */ false); +const xmlSerializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ true); +const serializer = new coreHttp__namespace.Serializer(Mappers, /* isXml */ false); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", @@ -76506,7 +76941,7 @@ const uploadOperationSpec = { blobType2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", @@ -76559,7 +76994,7 @@ const putBlobFromUrlOperationSpec = { copySourceBlobProperties ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", @@ -76595,7 +77030,7 @@ const stageBlockOperationSpec = { accept2 ], mediaType: "binary", - serializer: serializer$2 + serializer }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", @@ -76636,7 +77071,7 @@ const stageBlockFromURLOperationSpec = { sourceRange1 ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -76686,7 +77121,7 @@ const commitBlockListOperationSpec = { isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", - serializer: xmlSerializer$5 + serializer: xmlSerializer }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", @@ -76716,7 +77151,7 @@ const getBlockListOperationSpec = { ifTags ], isXML: true, - serializer: xmlSerializer$5 + serializer: xmlSerializer }; // Copyright (c) Microsoft Corporation. 
@@ -76727,8 +77162,8 @@ const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. -const SDK_VERSION = "12.8.0"; -const SERVICE_VERSION = "2020-10-02"; +const SDK_VERSION = "12.9.0"; +const SERVICE_VERSION = "2021-04-10"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB const BLOCK_BLOB_MAX_BLOCKS = 50000; @@ -76745,15 +77180,15 @@ const URLConstants = { SIGNATURE: "sig", SNAPSHOT: "snapshot", VERSIONID: "versionid", - TIMEOUT: "timeout" - } + TIMEOUT: "timeout", + }, }; const HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, - HTTP_RANGE_NOT_SATISFIABLE: 416 + HTTP_RANGE_NOT_SATISFIABLE: 416, }; const HeaderConstants = { AUTHORIZATION: "Authorization", @@ -76778,7 +77213,7 @@ const HeaderConstants = { X_MS_COPY_SOURCE: "x-ms-copy-source", X_MS_DATE: "x-ms-date", X_MS_ERROR_CODE: "x-ms-error-code", - X_MS_VERSION: "x-ms-version" + X_MS_VERSION: "x-ms-version", }; const ETagNone = ""; const ETagAny = "*"; @@ -76883,7 +77318,7 @@ const StorageBlobLoggingAllowedHeaderNames = [ "x-ms-tag-count", "x-ms-encryption-key-sha256", "x-ms-if-tags", - "x-ms-source-if-tags" + "x-ms-source-if-tags", ]; const StorageBlobLoggingAllowedQueryParameters = [ "comp", @@ -76918,8 +77353,9 @@ const StorageBlobLoggingAllowedQueryParameters = [ "skt", "sktid", "skv", - "snapshot" + "snapshot", ]; +const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; // Copyright (c) Microsoft Corporation. /** @@ -77059,7 +77495,7 @@ function extractConnectionStringParts(connectionString) { url: blobEndpoint, accountName, accountKey, - proxyUri + proxyUri, }; } else { @@ -77391,14 +77827,14 @@ function toBlobTags(tags) { return undefined; } const res = { - blobTagSet: [] + blobTagSet: [], }; for (const key in tags) { if (Object.prototype.hasOwnProperty.call(tags, key)) { const value = tags[key]; res.blobTagSet.push({ key, - value + value, }); } } @@ -77438,33 +77874,33 @@ function toQuerySerialization(textConfiguration) { fieldQuote: textConfiguration.fieldQuote || "", recordSeparator: textConfiguration.recordSeparator, escapeChar: textConfiguration.escapeCharacter || "", - headersPresent: textConfiguration.hasHeaders || false - } - } + headersPresent: textConfiguration.hasHeaders || false, + }, + }, }; case "json": return { format: { type: "json", jsonTextConfiguration: { - recordSeparator: textConfiguration.recordSeparator - } - } + recordSeparator: textConfiguration.recordSeparator, + }, + }, }; case "arrow": return { format: { type: "arrow", arrowConfiguration: { - schema: textConfiguration.schema - } - } + schema: textConfiguration.schema, + }, + }, }; case "parquet": return { format: { - type: "parquet" - } + type: "parquet", + }, }; default: throw Error("Invalid BlobQueryTextConfiguration."); @@ -77488,7 +77924,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { } const rule = { ruleId: ids[1], - replicationStatus: objectReplicationRecord[key] + replicationStatus: objectReplicationRecord[key], }; const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); if (policyIndex > -1) { @@ -77497,7 +77933,7 @@ function parseObjectReplicationRecord(objectReplicationRecord) { else { orProperties.push({ policyId: ids[0], - rules: [rule] + rules: [rule], }); } } @@ -77516,6 +77952,202 @@ function attachCredential(thing, credential) { function 
httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? httpAuthorization.scheme + " " + httpAuthorization.value : undefined; } +function BlobNameToString(name) { + if (name.encoded) { + return decodeURIComponent(name.content); + } + else { + return name.content; + } +} +function ConvertInternalResponseOfListBlobFlat(internalResponse) { + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { + var _a; + return Object.assign(Object.assign({}, internalResponse), { segment: { + blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; + }), + blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); + return blobItem; + }), + } }); +} +function decodeBase64String(value) { + if (coreHttp.isNode) { + return Buffer.from(value, "base64"); + } + else { + const byteString = atob(value); + const arr = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + arr[i] = byteString.charCodeAt(i); + } + return arr; + } +} +function ParseBoolean(content) { + if (content === undefined) + return undefined; + if (content === "true") + return true; + if (content === "false") + return false; + return undefined; +} +function ParseBlobName(blobNameInXML) { + if (blobNameInXML["$"] !== undefined && blobNameInXML["#"] !== undefined) { + return { + encoded: ParseBoolean(blobNameInXML["$"]["Encoded"]), + content: blobNameInXML["#"], + }; + } + else { + return { + encoded: false, + content: blobNameInXML, + }; + } +} +function ParseBlobItem(blobInXML) { + const blobPropertiesInXML = blobInXML["Properties"]; + const blobProperties = { + createdOn: new Date(blobPropertiesInXML["Creation-Time"]), + lastModified: new Date(blobPropertiesInXML["Last-Modified"]), + etag: blobPropertiesInXML["Etag"], + contentLength: blobPropertiesInXML["Content-Length"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["Content-Length"]), + contentType: blobPropertiesInXML["Content-Type"], + contentEncoding: blobPropertiesInXML["Content-Encoding"], + contentLanguage: blobPropertiesInXML["Content-Language"], + contentMD5: decodeBase64String(blobPropertiesInXML["Content-MD5"]), + contentDisposition: blobPropertiesInXML["Content-Disposition"], + cacheControl: blobPropertiesInXML["Cache-Control"], + blobSequenceNumber: blobPropertiesInXML["x-ms-blob-sequence-number"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["x-ms-blob-sequence-number"]), + blobType: blobPropertiesInXML["BlobType"], + leaseStatus: blobPropertiesInXML["LeaseStatus"], + leaseState: blobPropertiesInXML["LeaseState"], + leaseDuration: blobPropertiesInXML["LeaseDuration"], + copyId: blobPropertiesInXML["CopyId"], + copyStatus: blobPropertiesInXML["CopyStatus"], + copySource: blobPropertiesInXML["CopySource"], + copyProgress: blobPropertiesInXML["CopyProgress"], + copyCompletedOn: blobPropertiesInXML["CopyCompletionTime"] === undefined + ? 
undefined + : new Date(blobPropertiesInXML["CopyCompletionTime"]), + copyStatusDescription: blobPropertiesInXML["CopyStatusDescription"], + serverEncrypted: ParseBoolean(blobPropertiesInXML["ServerEncrypted"]), + incrementalCopy: ParseBoolean(blobPropertiesInXML["IncrementalCopy"]), + destinationSnapshot: blobPropertiesInXML["DestinationSnapshot"], + deletedOn: blobPropertiesInXML["DeletedTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["DeletedTime"]), + remainingRetentionDays: blobPropertiesInXML["RemainingRetentionDays"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["RemainingRetentionDays"]), + accessTier: blobPropertiesInXML["AccessTier"], + accessTierInferred: ParseBoolean(blobPropertiesInXML["AccessTierInferred"]), + archiveStatus: blobPropertiesInXML["ArchiveStatus"], + customerProvidedKeySha256: blobPropertiesInXML["CustomerProvidedKeySha256"], + encryptionScope: blobPropertiesInXML["EncryptionScope"], + accessTierChangedOn: blobPropertiesInXML["AccessTierChangeTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["AccessTierChangeTime"]), + tagCount: blobPropertiesInXML["TagCount"] === undefined + ? undefined + : parseFloat(blobPropertiesInXML["TagCount"]), + expiresOn: blobPropertiesInXML["Expiry-Time"] === undefined + ? undefined + : new Date(blobPropertiesInXML["Expiry-Time"]), + isSealed: ParseBoolean(blobPropertiesInXML["Sealed"]), + rehydratePriority: blobPropertiesInXML["RehydratePriority"], + lastAccessedOn: blobPropertiesInXML["LastAccessTime"] === undefined + ? undefined + : new Date(blobPropertiesInXML["LastAccessTime"]), + immutabilityPolicyExpiresOn: blobPropertiesInXML["ImmutabilityPolicyUntilDate"] === undefined + ? undefined + : new Date(blobPropertiesInXML["ImmutabilityPolicyUntilDate"]), + immutabilityPolicyMode: blobPropertiesInXML["ImmutabilityPolicyMode"], + legalHold: ParseBoolean(blobPropertiesInXML["LegalHold"]), + }; + return { + name: ParseBlobName(blobInXML["Name"]), + deleted: ParseBoolean(blobInXML["Deleted"]), + snapshot: blobInXML["Snapshot"], + versionId: blobInXML["VersionId"], + isCurrentVersion: ParseBoolean(blobInXML["IsCurrentVersion"]), + properties: blobProperties, + metadata: blobInXML["Metadata"], + blobTags: ParseBlobTags(blobInXML["Tags"]), + objectReplicationMetadata: blobInXML["OrMetadata"], + hasVersionsOnly: ParseBoolean(blobInXML["HasVersionsOnly"]), + }; +} +function ParseBlobPrefix(blobPrefixInXML) { + return { + name: ParseBlobName(blobPrefixInXML["Name"]), + }; +} +function ParseBlobTag(blobTagInXML) { + return { + key: blobTagInXML["Key"], + value: blobTagInXML["Value"], + }; +} +function ParseBlobTags(blobTagsInXML) { + if (blobTagsInXML === undefined || + blobTagsInXML["TagSet"] === undefined || + blobTagsInXML["TagSet"]["Tag"] === undefined) { + return undefined; + } + const blobTagSet = []; + if (blobTagsInXML["TagSet"]["Tag"] instanceof Array) { + blobTagsInXML["TagSet"]["Tag"].forEach((blobTagInXML) => { + blobTagSet.push(ParseBlobTag(blobTagInXML)); + }); + } + else { + blobTagSet.push(ParseBlobTag(blobTagsInXML["TagSet"]["Tag"])); + } + return { blobTagSet: blobTagSet }; +} +function ProcessBlobItems(blobArrayInXML) { + const blobItems = []; + if (blobArrayInXML instanceof Array) { + blobArrayInXML.forEach((blobInXML) => { + blobItems.push(ParseBlobItem(blobInXML)); + }); + } + else { + blobItems.push(ParseBlobItem(blobArrayInXML)); + } + return blobItems; +} +function ProcessBlobPrefixes(blobPrefixesInXML) { + const blobPrefixes = []; + if (blobPrefixesInXML instanceof 
Array) { + blobPrefixesInXML.forEach((blobPrefixInXML) => { + blobPrefixes.push(ParseBlobPrefix(blobPrefixInXML)); + }); + } + else { + blobPrefixes.push(ParseBlobPrefix(blobPrefixesInXML)); + } + return blobPrefixes; +} // Copyright (c) Microsoft Corporation. /** @@ -77546,9 +78178,16 @@ class StorageBrowserPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { return this._nextPolicy.sendRequest(request); } + if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { + request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); + } + request.headers.remove(HeaderConstants.COOKIE); + // According to XHR standards, content-length should be fully controlled by browsers + request.headers.remove(HeaderConstants.CONTENT_LENGTH); + return this._nextPolicy.sendRequest(request); } } @@ -77569,6 +78208,10 @@ class StorageBrowserPolicyFactory { } // Copyright (c) Microsoft Corporation. +/** + * RetryPolicy types. + */ +exports.StorageRetryPolicyType = void 0; (function (StorageRetryPolicyType) { /** * Exponential retry. Retry time delay grows exponentially. @@ -77586,7 +78229,7 @@ const DEFAULT_RETRY_OPTIONS = { retryDelayInMs: 4 * 1000, retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", - tryTimeoutInMs: undefined // Use server side default timeout strategy + tryTimeoutInMs: undefined, // Use server side default timeout strategy }; const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** @@ -77623,7 +78266,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs, secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost - : DEFAULT_RETRY_OPTIONS.secondaryHost + : DEFAULT_RETRY_OPTIONS.secondaryHost, }; } /** @@ -77700,7 +78343,7 @@ class StorageRetryPolicy extends coreHttp.BaseRequestPolicy { "ENOTFOUND", "TIMEOUT", "EPIPE", - "REQUEST_SEND_ERROR" // For default xhr based http client provided in ms-rest-js + "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js ]; if (err) { for (const retriableError of retriableErrors) { @@ -77886,7 +78529,7 @@ class TelemetryPolicy extends coreHttp.BaseRequestPolicy { * @param request - */ async sendRequest(request) { - { + if (coreHttp.isNode) { if (!request.headers) { request.headers = new coreHttp.HttpHeaders(); } @@ -77909,7 +78552,7 @@ class TelemetryPolicyFactory { */ constructor(telemetry) { const userAgentInfo = []; - { + if (coreHttp.isNode) { if (telemetry) { const telemetryString = telemetry.userAgentPrefix || ""; if (telemetryString.length > 0 && userAgentInfo.indexOf(telemetryString) === -1) { @@ -77922,7 +78565,7 @@ class TelemetryPolicyFactory { userAgentInfo.push(libInfo); } // e.g. (NODE-VERSION 4.9.1; Windows_NT 10.0.16299) - const runtimeInfo = `(NODE-VERSION ${process.version}; ${os.type()} ${os.release()})`; + const runtimeInfo = `(NODE-VERSION ${process.version}; ${os__namespace.type()} ${os__namespace.release()})`; if (userAgentInfo.indexOf(runtimeInfo) === -1) { userAgentInfo.push(runtimeInfo); } @@ -77946,6 +78589,247 @@ function getCachedDefaultHttpClient() { return _defaultHttpClient; } +// Copyright (c) Microsoft Corporation. +/** + * A set of constants used internally when processing requests. + */ +const Constants = { + DefaultScope: "/.default", + /** + * Defines constants for use with HTTP headers. 
+ */ + HeaderConstants: { + /** + * The Authorization header. + */ + AUTHORIZATION: "authorization", + }, +}; +// Default options for the cycler if none are provided +const DEFAULT_CYCLER_OPTIONS = { + forcedRefreshWindowInMs: 1000, + retryIntervalInMs: 3000, + refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry +}; +/** + * Converts an an unreliable access token getter (which may resolve with null) + * into an AccessTokenGetter by retrying the unreliable getter in a regular + * interval. + * + * @param getAccessToken - a function that produces a promise of an access + * token that may fail by returning null + * @param retryIntervalInMs - the time (in milliseconds) to wait between retry + * attempts + * @param timeoutInMs - the timestamp after which the refresh attempt will fail, + * throwing an exception + * @returns - a promise that, if it resolves, will resolve with an access token + */ +async function beginRefresh(getAccessToken, retryIntervalInMs, timeoutInMs) { + // This wrapper handles exceptions gracefully as long as we haven't exceeded + // the timeout. + async function tryGetAccessToken() { + if (Date.now() < timeoutInMs) { + try { + return await getAccessToken(); + } + catch (_a) { + return null; + } + } + else { + const finalToken = await getAccessToken(); + // Timeout is up, so throw if it's still null + if (finalToken === null) { + throw new Error("Failed to refresh access token."); + } + return finalToken; + } + } + let token = await tryGetAccessToken(); + while (token === null) { + await coreHttp.delay(retryIntervalInMs); + token = await tryGetAccessToken(); + } + return token; +} +/** + * Creates a token cycler from a credential, scopes, and optional settings. + * + * A token cycler represents a way to reliably retrieve a valid access token + * from a TokenCredential. It will handle initializing the token, refreshing it + * when it nears expiration, and synchronizes refresh attempts to avoid + * concurrency hazards. + * + * @param credential - the underlying TokenCredential that provides the access + * token + * @param scopes - the scopes to request authorization for + * @param tokenCyclerOptions - optionally override default settings for the cycler + * + * @returns - a function that reliably produces a valid access token + */ +function createTokenCycler(credential, scopes, tokenCyclerOptions) { + let refreshWorker = null; + let token = null; + const options = Object.assign(Object.assign({}, DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); + /** + * This little holder defines several predicates that we use to construct + * the rules of refreshing the token. + */ + const cycler = { + /** + * Produces true if a refresh job is currently in progress. + */ + get isRefreshing() { + return refreshWorker !== null; + }, + /** + * Produces true if the cycler SHOULD refresh (we are within the refresh + * window and not already refreshing) + */ + get shouldRefresh() { + var _a; + return (!cycler.isRefreshing && + ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now()); + }, + /** + * Produces true if the cycler MUST refresh (null or nearly-expired + * token). + */ + get mustRefresh() { + return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); + }, + }; + /** + * Starts a refresh job or returns the existing job if one is already + * running. 
+ */ + function refresh(getTokenOptions) { + var _a; + if (!cycler.isRefreshing) { + // We bind `scopes` here to avoid passing it around a lot + const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); + // Take advantage of promise chaining to insert an assignment to `token` + // before the refresh can be considered done. + refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, + // If we don't have a token, then we should timeout immediately + (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) + .then((_token) => { + refreshWorker = null; + token = _token; + return token; + }) + .catch((reason) => { + // We also should reset the refresher if we enter a failed state. All + // existing awaiters will throw, but subsequent requests will start a + // new retry chain. + refreshWorker = null; + token = null; + throw reason; + }); + } + return refreshWorker; + } + return async (tokenOptions) => { + // + // Simple rules: + // - If we MUST refresh, then return the refresh task, blocking + // the pipeline until a token is available. + // - If we SHOULD refresh, then run refresh but don't return it + // (we can still use the cached token). + // - Return the token, since it's fine if we didn't return in + // step 1. + // + if (cycler.mustRefresh) + return refresh(tokenOptions); + if (cycler.shouldRefresh) { + refresh(tokenOptions); + } + return token; + }; +} +/** + * We will retrieve the challenge only if the response status code was 401, + * and if the response contained the header "WWW-Authenticate" with a non-empty value. + */ +function getChallenge(response) { + const challenge = response.headers.get("WWW-Authenticate"); + if (response.status === 401 && challenge) { + return challenge; + } + return; +} +/** + * Converts: `Bearer a="b" c="d"`. + * Into: `[ { a: 'b', c: 'd' }]`. + * + * @internal + */ +function parseChallenge(challenge) { + const bearerChallenge = challenge.slice("Bearer ".length); + const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); + const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); + // Key-value pairs to plain object: + return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); +} +// #endregion +/** + * Creates a new factory for a RequestPolicy that applies a bearer token to + * the requests' `Authorization` headers. + * + * @param credential - The TokenCredential implementation that can supply the bearer token. + * @param scopes - The scopes for which the bearer token applies. 
+ */ +function storageBearerTokenChallengeAuthenticationPolicy(credential, scopes) { + // This simple function encapsulates the entire process of reliably retrieving the token + let getToken = createTokenCycler(credential, scopes); + class StorageBearerTokenChallengeAuthenticationPolicy extends coreHttp.BaseRequestPolicy { + constructor(nextPolicy, options) { + super(nextPolicy, options); + } + async sendRequest(webResource) { + if (!webResource.url.toLowerCase().startsWith("https://")) { + throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); + } + const getTokenInternal = getToken; + const token = (await getTokenInternal({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + })).token; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${token}`); + const response = await this._nextPolicy.sendRequest(webResource); + if ((response === null || response === void 0 ? void 0 : response.status) === 401) { + const challenge = getChallenge(response); + if (challenge) { + const challengeInfo = parseChallenge(challenge); + const challengeScopes = challengeInfo.resource_id + Constants.DefaultScope; + const parsedAuthUri = coreHttp.URLBuilder.parse(challengeInfo.authorization_uri); + const pathSegments = parsedAuthUri.getPath().split("/"); + const tenantId = pathSegments[1]; + const getTokenForChallenge = createTokenCycler(credential, challengeScopes); + const tokenForChallenge = (await getTokenForChallenge({ + abortSignal: webResource.abortSignal, + tracingOptions: { + tracingContext: webResource.tracingContext, + }, + tenantId: tenantId, + })).token; + getToken = getTokenForChallenge; + webResource.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${tokenForChallenge}`); + return this._nextPolicy.sendRequest(webResource); + } + } + return response; + } + } + return { + create: (nextPolicy, options) => { + return new StorageBearerTokenChallengeAuthenticationPolicy(nextPolicy, options); + }, + }; +} + // Copyright (c) Microsoft Corporation. /** * A helper to decide if a given argument satisfies the Pipeline contract @@ -77991,7 +78875,7 @@ class Pipeline { toServiceClientOptions() { return { httpClient: this.options.httpClient, - requestPolicyFactories: this.factories + requestPolicyFactories: this.factories, }; } } @@ -78003,6 +78887,7 @@ class Pipeline { * @returns A new Pipeline object. */ function newPipeline(credential, pipelineOptions = {}) { + var _a; if (credential === undefined) { credential = new AnonymousCredential(); } @@ -78024,16 +78909,16 @@ function newPipeline(credential, pipelineOptions = {}) { coreHttp.logPolicy({ logger: logger.info, allowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, - allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters - }) + allowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, + }), ]; - { + if (coreHttp.isNode) { // policies only available in Node.js runtime, not in browsers factories.push(coreHttp.proxyPolicy(pipelineOptions.proxyOptions)); factories.push(coreHttp.disableResponseDecompressionPolicy()); } factories.push(coreHttp.isTokenCredential(credential) - ? attachCredential(coreHttp.bearerTokenAuthenticationPolicy(credential, StorageOAuthScopes), credential) + ? attachCredential(storageBearerTokenChallengeAuthenticationPolicy(credential, (_a = pipelineOptions.audience) !== null && _a !== void 0 ? 
_a : StorageOAuthScopes), credential) : credential); return new Pipeline(factories, pipelineOptions); } @@ -78060,7 +78945,9 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { */ signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); - if (request.body && typeof request.body === "string" && request.body.length > 0) { + if (request.body && + (typeof request.body === "string" || request.body !== undefined) && + request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ @@ -78075,7 +78962,7 @@ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), - this.getHeaderValueToSign(request, HeaderConstants.RANGE) + this.getHeaderValueToSign(request, HeaderConstants.RANGE), ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + @@ -78204,9 +79091,7 @@ class StorageSharedKeyCredential extends Credential { * @param stringToSign - */ computeHMACSHA256(stringToSign) { - return crypto.createHmac("sha256", this.accountKey) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } } @@ -78218,8 +79103,8 @@ class StorageSharedKeyCredential extends Credential { * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ const packageName = "azure-storage-blob"; -const packageVersion = "12.8.0"; -class StorageClientContext extends coreHttp.ServiceClient { +const packageVersion = "12.9.0"; +class StorageClientContext extends coreHttp__namespace.ServiceClient { /** * Initializes a new instance of the StorageClientContext class. * @param url The URL of the service account, container, or blob that is the target of the desired @@ -78235,7 +79120,7 @@ class StorageClientContext extends coreHttp.ServiceClient { options = {}; } if (!options.userAgent) { - const defaultUserAgent = coreHttp.getDefaultUserAgentValue(); + const defaultUserAgent = coreHttp__namespace.getDefaultUserAgentValue(); options.userAgent = `${packageName}/${packageVersion} ${defaultUserAgent}`; } super(undefined, options); @@ -78244,7 +79129,7 @@ class StorageClientContext extends coreHttp.ServiceClient { // Parameter assignments this.url = url; // Assigning values to Constant parameters - this.version = options.version || "2020-10-02"; + this.version = options.version || "2021-04-10"; } } @@ -78291,7 +79176,7 @@ class StorageClient { */ const createSpan = coreTracing.createSpanFunction({ packagePrefix: "Azure.Storage.Blob", - namespace: "Microsoft.Storage" + namespace: "Microsoft.Storage", }); /** * @internal @@ -78305,7 +79190,7 @@ function convertTracingToRequestOptionsBase(options) { return { // By passing spanOptions if they exist at runtime, we're backwards compatible with @azure/core-tracing@preview.13 and earlier. spanOptions: (_a = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _a === void 0 ? void 0 : _a.spanOptions, - tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? void 0 : _b.tracingContext + tracingContext: (_b = options === null || options === void 0 ? void 0 : options.tracingOptions) === null || _b === void 0 ? 
void 0 : _b.tracingContext, }; } @@ -78362,6 +79247,10 @@ class BlobSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an @@ -78403,6 +79292,9 @@ class BlobSASPermissions { case "i": blobSASPermissions.setImmutabilityPolicy = true; break; + case "y": + blobSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission: ${char}`); } @@ -78447,6 +79339,9 @@ class BlobSASPermissions { if (permissionLike.setImmutabilityPolicy) { blobSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + blobSASPermissions.permanentDelete = true; + } return blobSASPermissions; } /** @@ -78487,6 +79382,9 @@ class BlobSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -78546,6 +79444,14 @@ class ContainerSASPermissions { * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; + /** + * Specifies that Filter Blobs by Tags is permitted. + */ + this.filterByTags = false; } /** * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an @@ -78590,6 +79496,12 @@ class ContainerSASPermissions { case "i": containerSASPermissions.setImmutabilityPolicy = true; break; + case "y": + containerSASPermissions.permanentDelete = true; + break; + case "f": + containerSASPermissions.filterByTags = true; + break; default: throw new RangeError(`Invalid permission ${char}`); } @@ -78637,6 +79549,12 @@ class ContainerSASPermissions { if (permissionLike.setImmutabilityPolicy) { containerSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + containerSASPermissions.permanentDelete = true; + } + if (permissionLike.filterByTags) { + containerSASPermissions.filterByTags = true; + } return containerSASPermissions; } /** @@ -78682,6 +79600,12 @@ class ContainerSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } + if (this.filterByTags) { + permissions.push("f"); + } return permissions.join(""); } } @@ -78711,9 +79635,7 @@ class UserDelegationKeyCredential { */ computeHMACSHA256(stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); - return crypto.createHmac("sha256", this.key) - .update(stringToSign, "utf8") - .digest("base64"); + return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } } @@ -78731,6 +79653,10 @@ function ipRangeToString(ipRange) { } // Copyright (c) Microsoft Corporation. +/** + * Protocols for generated SAS. + */ +exports.SASProtocol = void 0; (function (SASProtocol) { /** * Protocol that allows HTTPS only @@ -78751,7 +79677,7 @@ function ipRangeToString(ipRange) { * NOTE: Instances of this class are immutable. 
*/ class SASQueryParameters { - constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId) { + constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { @@ -78764,6 +79690,7 @@ class SASQueryParameters { this.expiresOn = permissionsOrOptions.expiresOn; this.ipRangeInner = permissionsOrOptions.ipRange; this.identifier = permissionsOrOptions.identifier; + this.encryptionScope = permissionsOrOptions.encryptionScope; this.resource = permissionsOrOptions.resource; this.cacheControl = permissionsOrOptions.cacheControl; this.contentDisposition = permissionsOrOptions.contentDisposition; @@ -78789,6 +79716,7 @@ class SASQueryParameters { this.protocol = protocol; this.startsOn = startsOn; this.ipRangeInner = ipRange; + this.encryptionScope = encryptionScope; this.identifier = identifier; this.resource = resource; this.cacheControl = cacheControl; @@ -78817,7 +79745,7 @@ class SASQueryParameters { if (this.ipRangeInner) { return { end: this.ipRangeInner.end, - start: this.ipRangeInner.start + start: this.ipRangeInner.start, }; } return undefined; @@ -78836,6 +79764,7 @@ class SASQueryParameters { "se", "sip", "si", + "ses", "skoid", "sktid", "skt", @@ -78851,7 +79780,7 @@ class SASQueryParameters { "rscl", "rsct", "saoid", - "scid" + "scid", ]; const queries = []; for (const param of params) { @@ -78880,6 +79809,9 @@ class SASQueryParameters { case "si": this.tryAppendQueryParameter(queries, param, this.identifier); break; + case "ses": + this.tryAppendQueryParameter(queries, param, this.encryptionScope); + break; case "skoid": // Signed object ID this.tryAppendQueryParameter(queries, param, this.signedOid); break; @@ -78964,6 +79896,15 @@ function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredent if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } + // Version 2020-12-06 adds support for encryptionscope in SAS. + if (version >= "2020-12-06") { + if (sharedKeyCredential !== undefined) { + return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); + } + else { + return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); + } + } // Version 2019-12-12 adds support for the blob tags permission. // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string @@ -79045,7 +79986,7 @@ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? 
blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); @@ -79114,11 +80055,81 @@ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKe blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", - blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "" + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn and identifier. + * + * WARNING: When identifier is not provided, permissions and expiresOn are required. + * You MUST assign value to identifier or expiresOn & permissions manually if you initial with + * this constructor. + * + * @param blobSASSignatureValues - + * @param sharedKeyCredential - + */ +function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + if (!blobSASSignatureValues.identifier && + !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + blobSASSignatureValues.identifier, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", + blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", + blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", + blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", + blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", + ].join("\n"); + const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope); +} /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. 
@@ -79190,7 +80201,7 @@ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey); @@ -79269,11 +80280,91 @@ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userD blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, - blobSASSignatureValues.contentType + blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId); } +/** + * ONLY AVAILABLE IN NODE.JS RUNTIME. + * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. + * + * Creates an instance of SASQueryParameters. + * + * Only accepts required settings needed to create a SAS. For optional settings please + * set corresponding properties directly, such as permissions, startsOn. + * + * WARNING: identifier will be ignored, permissions and expiresOn are required. + * + * @param blobSASSignatureValues - + * @param userDelegationKeyCredential - + */ +function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { + blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); + // Stored access policies are not supported for a user delegation SAS. + if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { + throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); + } + let resource = "c"; + let timestamp = blobSASSignatureValues.snapshotTime; + if (blobSASSignatureValues.blobName) { + resource = "b"; + if (blobSASSignatureValues.snapshotTime) { + resource = "bs"; + } + else if (blobSASSignatureValues.versionId) { + resource = "bv"; + timestamp = blobSASSignatureValues.versionId; + } + } + // Calling parse and toString guarantees the proper ordering and throws on invalid characters. 
+ let verifiedPermissions; + if (blobSASSignatureValues.permissions) { + if (blobSASSignatureValues.blobName) { + verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + else { + verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); + } + } + // Signature is generated on the un-url-encoded values. + const stringToSign = [ + verifiedPermissions ? verifiedPermissions : "", + blobSASSignatureValues.startsOn + ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) + : "", + blobSASSignatureValues.expiresOn + ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) + : "", + getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), + userDelegationKeyCredential.userDelegationKey.signedObjectId, + userDelegationKeyCredential.userDelegationKey.signedTenantId, + userDelegationKeyCredential.userDelegationKey.signedStartsOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedExpiresOn + ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) + : "", + userDelegationKeyCredential.userDelegationKey.signedService, + userDelegationKeyCredential.userDelegationKey.signedVersion, + blobSASSignatureValues.preauthorizedAgentObjectId, + undefined, + blobSASSignatureValues.correlationId, + blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", + blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", + blobSASSignatureValues.version, + resource, + timestamp, + blobSASSignatureValues.encryptionScope, + blobSASSignatureValues.cacheControl, + blobSASSignatureValues.contentDisposition, + blobSASSignatureValues.contentEncoding, + blobSASSignatureValues.contentLanguage, + blobSASSignatureValues.contentType, + ].join("\n"); + const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); + return new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope); +} function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" @@ -79307,6 +80398,11 @@ function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } + if (blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); + } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -79317,10 +80413,18 @@ function 
SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } + if (version < "2021-04-10" && + blobSASSignatureValues.permissions && + blobSASSignatureValues.permissions.filterByTags) { + throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); + } if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } + if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } blobSASSignatureValues.version = version; return blobSASSignatureValues; } @@ -79394,7 +80498,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -79429,7 +80533,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -79462,7 +80566,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -79494,7 +80598,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -79529,7 +80633,7 @@ class BlobLeaseClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -79609,8 +80713,7 @@ class RetriableReadableStream extends stream.Readable { }); } else { - this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this - .offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); + this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); } } else { @@ -80455,7 +81558,7 @@ class AvroReader { } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); if (!arraysEqual(header, AVRO_INIT_BYTES)) { throw new Error("Stream is not an Avro file."); @@ -80463,7 +81566,7 @@ class AvroReader { // File metadata is written as if defined by the following map schema: // { "type": "map", "values": "bytes"} this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Validate codec const codec = this._metadata[AVRO_CODEC_KEY]; @@ -80472,7 +81575,7 @@ class AvroReader { } // The 16-byte, randomly-generated sync marker for this file. 
this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // Parse the schema const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); @@ -80481,7 +81584,7 @@ class AvroReader { this._blockOffset = this._initialBlockOffset + this._dataStream.position; } this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); // skip block length await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); @@ -80503,13 +81606,13 @@ class AvroReader { } while (this.hasNext()) { const result = yield tslib.__await(this._itemType.read(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._itemsRemainingInBlock--; this._objectIndex++; if (this._itemsRemainingInBlock == 0) { const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._objectIndex = 0; @@ -80518,7 +81621,7 @@ class AvroReader { } try { this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, })); } catch (err) { @@ -80717,7 +81820,7 @@ class BlobQuickQueryStream extends stream.Readable { position, name, isFatal: fatal, - description + description, }); } break; @@ -81093,6 +82196,11 @@ class BlobQueryResponse { } // Copyright (c) Microsoft Corporation. +/** + * Represents the access tier on a blob. + * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} + */ +exports.BlockBlobTier = void 0; (function (BlockBlobTier) { /** * Optimized for storing data that is accessed frequently. @@ -81108,6 +82216,12 @@ class BlobQueryResponse { */ BlockBlobTier["Archive"] = "Archive"; })(exports.BlockBlobTier || (exports.BlockBlobTier = {})); +/** + * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. + * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} + * for detailed information on the corresponding IOPS and throughput per PageBlobTier. + */ +exports.PremiumPageBlobTier = void 0; (function (PremiumPageBlobTier) { /** * P4 Tier. @@ -81168,6 +82282,20 @@ function ensureCpkIfSpecified(cpk, isHttps) { cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } } +/** + * Defines the known cloud audiences for Storage. + */ +exports.StorageBlobAudience = void 0; +(function (StorageBlobAudience) { + /** + * The OAuth scope to use to retrieve an AAD token for Azure Storage. + */ + StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; + /** + * The OAuth scope to use to retrieve an AAD token for Azure Disk. + */ + StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; +})(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
@@ -81180,16 +82308,16 @@ function ensureCpkIfSpecified(cpk, isHttps) { function rangeResponseFromModel(response) { const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ offset: x.start, - count: x.end - x.start + count: x.end - x.start, })); return Object.assign(Object.assign({}, response), { pageRange, clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { pageRange, - clearRange + clearRange, } }) }); } @@ -81202,7 +82330,7 @@ function rangeResponseFromModel(response) { */ class BlobBeginCopyFromUrlPoller extends coreLro.Poller { constructor(options) { - const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions } = options; + const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; @@ -81238,7 +82366,7 @@ const cancel = async function cancel(options = {}) { } // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call await state.blobClient.abortCopyFromURL(copyId, { - abortSignal: options.abortSignal + abortSignal: options.abortSignal, }); state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); @@ -81316,7 +82444,7 @@ function makeBlobBeginCopyFromURLPollOperation(state) { state: Object.assign({}, state), cancel, toString, - update + update, }; } @@ -81958,7 +83086,7 @@ async function streamToBuffer2(stream, buffer, encoding) { */ async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { - const ws = fs.createWriteStream(file); + const ws = fs__namespace.createWriteStream(file); rs.on("error", (err) => { reject(err); }); @@ -81974,8 +83102,8 @@ async function readStreamToLocalFile(rs, file) { * * Promisified version of fs.stat(). 
*/ -const fsStat = util.promisify(fs.stat); -const fsCreateReadStream = fs.createReadStream; +const fsStat = util__namespace.promisify(fs__namespace.stat); +const fsCreateReadStream = fs__namespace.createReadStream; /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, @@ -82018,12 +83146,17 @@ class BlobClient extends StorageClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -82040,10 +83173,8 @@ class BlobClient extends StorageClient { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); - ({ - blobName: this._name, - containerName: this._containerName - } = this.getBlobAndContainerNamesFromUrl()); + ({ blobName: this._name, containerName: this._containerName } = + this.getBlobAndContainerNamesFromUrl()); this.blobContext = new Blob$1(this.storageClientContext); this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); @@ -82168,11 +83299,13 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-download", options); try { const res = await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress // for Node.js, progress is reported by RetriableReadableStream + onDownloadProgress: coreHttp.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey }, convertTracingToRequestOptionsBase(updatedOptions))); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately - if (false) {} + if (!coreHttp.isNode) { + return wrappedRes; + } // We support retrying when download stream unexpected ends in Node.js runtime // Following code shouldn't be bundled into browser build, however some // bundlers may try to bundle following code and "FileReadResponse.ts". 
@@ -82197,16 +83330,16 @@ class BlobClient extends StorageClient { ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, - ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions + ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, }, range: rangeToString({ count: offset + res.contentLength - start, - offset: start + offset: start, }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, - cpkInfo: options.customerProvidedKey + cpkInfo: options.customerProvidedKey, }; // Debug purpose only // console.log( @@ -82217,13 +83350,13 @@ class BlobClient extends StorageClient { return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, - onProgress: options.onProgress + onProgress: options.onProgress, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82248,21 +83381,23 @@ class BlobClient extends StorageClient { abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } catch (e) { if (e.statusCode === 404) { - span.setStatus({ - code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking blob existence" - }); + // Expected exception when checking blob existence return false; } + else if (e.statusCode === 409 && + e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg) { + // Expected exception when checking blob existence + return true; + } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82294,7 +83429,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82321,7 +83456,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82343,20 +83478,19 @@ class BlobClient extends StorageClient { const { span, updatedOptions } = createSpan("BlobClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a blob or snapshot only if it exists." + message: "Expected exception when deleting a blob or snapshot only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82380,7 +83514,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82414,7 +83548,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82444,7 +83578,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82470,7 +83604,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82494,7 +83628,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82528,7 +83662,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82612,7 +83746,7 @@ class BlobClient extends StorageClient { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), - startCopyFromURL: (...args) => this.startCopyFromURL(...args) + startCopyFromURL: (...args) => this.startCopyFromURL(...args), }; const poller = new BlobBeginCopyFromUrlPoller({ blobClient: client, @@ -82620,7 +83754,7 @@ class BlobClient extends StorageClient { intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, - startCopyFromURLOptions: options + startCopyFromURLOptions: options, }); // Trigger the startCopyFromURL call by calling poll. // Any errors from this method should be surfaced to the user. @@ -82643,7 +83777,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82669,13 +83803,13 @@ class BlobClient extends StorageClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince - }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold }, convertTracingToRequestOptionsBase(updatedOptions))); + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, + }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82703,7 +83837,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82780,7 +83914,7 @@ class BlobClient extends StorageClient { conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, - tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) + tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)), }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); @@ -82799,7 +83933,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82837,7 +83971,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82918,13 +84052,13 @@ class BlobClient extends StorageClient { sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, - sourceIfTags: options.sourceConditions.tagConditions + sourceIfTags: options.sourceConditions.tagConditions, }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82965,7 +84099,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -82986,7 +84120,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83007,7 +84141,7 @@ class BlobClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83058,12 +84192,17 @@ class AppendBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -83118,7 +84257,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83139,20 +84278,19 @@ class AppendBlobClient extends BlobClient { const conditions = { ifNoneMatch: ETagAny }; try { const res = await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83175,7 +84313,7 @@ class AppendBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83214,13 +84352,13 @@ class AppendBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.appendBlobContext.appendBlock(contentLength, body, Object.assign({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83253,13 +84391,13 @@ class AppendBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83310,12 +84448,17 @@ class BlockBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -83381,23 +84524,25 @@ class BlockBlobClient extends BlobClient { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); const { span, updatedOptions } = createSpan("BlockBlobClient-query", options); try { - if (false) {} + if (!coreHttp.isNode) { + throw new Error("This operation currently is only supported in Node.js."); + } const response = await this._blobContext.query(Object.assign({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, inputSerialization: 
toQuerySerialization(options.inputTextConfiguration), - outputSerialization: toQuerySerialization(options.outputTextConfiguration) + outputSerialization: toQuerySerialization(options.outputTextConfiguration), }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }) }, convertTracingToRequestOptionsBase(updatedOptions))); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, - onError: options.onError + onError: options.onError, }); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83439,13 +84584,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.upload(contentLength, body, Object.assign({ abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83482,13 +84627,13 @@ class BlockBlobClient extends BlobClient { sourceIfModifiedSince: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifModifiedSince, sourceIfNoneMatch: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch, sourceIfUnmodifiedSince: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifUnmodifiedSince, - sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.tagConditions + sourceIfTags: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.tagConditions, }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags) }), convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83512,13 +84657,13 @@ class BlockBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.blockBlobContext.stageBlock(blockId, contentLength, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83556,7 +84701,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83587,7 +84732,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83621,7 +84766,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83648,7 +84793,7 @@ class BlockBlobClient extends BlobClient { async uploadData(data, options = {}) { const { span, updatedOptions } = createSpan("BlockBlobClient-uploadData", options); try { - if (true) { + if (coreHttp.isNode) { let buffer; if (data instanceof Buffer) { buffer = data; @@ -83662,12 +84807,15 @@ class BlockBlobClient extends BlobClient { } return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } - else {} + else { + const browserBlob = new Blob([data]); + return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); + } } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83703,7 +84851,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83716,7 +84864,7 @@ class BlockBlobClient extends BlobClient { * Uploads data to block blob. Requires a bodyFactory as the data source, * which need to return a {@link HttpRequestBody} object with the offset and size provided. * - * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is + * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. 
@@ -83782,14 +84930,14 @@ class BlockBlobClient extends BlobClient { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying // TODO: Hook with convenience layer progress event in finer level transferProgress += contentLength; if (options.onProgress) { options.onProgress({ - loadedBytes: transferProgress + loadedBytes: transferProgress, }); } }); @@ -83800,7 +84948,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83829,14 +84977,14 @@ class BlockBlobClient extends BlobClient { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, - start: offset + start: offset, }); }, size, Object.assign(Object.assign({}, options), { tracingOptions: Object.assign(Object.assign({}, options.tracingOptions), convertTracingToRequestOptionsBase(updatedOptions)) })); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83880,7 +85028,7 @@ class BlockBlobClient extends BlobClient { await this.stageBlock(blockID, body, length, { conditions: options.conditions, encryptionScope: options.encryptionScope, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying transferProgress += length; @@ -83899,7 +85047,7 @@ class BlockBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -83950,12 +85098,17 @@ class PageBlobClient extends BlobClient { const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { url = @@ -84005,7 +85158,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84028,20 +85181,19 @@ class PageBlobClient extends BlobClient { try { const conditions = { ifNoneMatch: ETagAny }; const res = await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions })); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) 
=== null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a blob only if it does not already exist." + message: "Expected exception when creating a blob only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84066,13 +85218,13 @@ class PageBlobClient extends BlobClient { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return await this.pageBlobContext.uploadPages(count, body, Object.assign({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { - onUploadProgress: options.onProgress + onUploadProgress: options.onProgress, }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84102,13 +85254,13 @@ class PageBlobClient extends BlobClient { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, - sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince + sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization) }, convertTracingToRequestOptionsBase(updatedOptions))); } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84135,7 +85287,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84164,7 +85316,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84194,7 +85346,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84224,7 +85376,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84250,7 +85402,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84277,7 +85429,7 @@ class PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84307,7 +85459,7 @@ class 
PageBlobClient extends BlobClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84455,7 +85607,7 @@ class BatchResponseParser { return { subResponses: deserializedSubResponses, subResponsesSucceededCount: subResponsesSucceededCount, - subResponsesFailedCount: subResponsesFailedCount + subResponsesFailedCount: subResponsesFailedCount, }; } } @@ -84602,7 +85754,7 @@ class BlobBatch { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); @@ -84610,7 +85762,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84649,7 +85801,7 @@ class BlobBatch { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, - credential: credential + credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); @@ -84657,7 +85809,7 @@ class BlobBatch { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84714,7 +85866,7 @@ class InnerBatchRequest { this.subRequestPrefix, `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, "", - `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}` // sub request start line with method + `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); for (const header of request.headers.headersArray()) { this.body += `${header.name}: ${header.value}${HTTP_LINE_ENDING}`; @@ -84754,7 +85906,7 @@ class BatchRequestAssemblePolicy extends coreHttp.BaseRequestPolicy { this.dummyResponse = { request: new coreHttp.WebResource(), status: 200, - headers: new coreHttp.HttpHeaders() + headers: new coreHttp.HttpHeaders(), }; this.batchRequest = batchRequest; } @@ -84921,14 +86073,14 @@ class BlobBatchClient { version: rawBatchResponse.version, subResponses: responseSummary.subResponses, subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, - subResponsesFailedCount: responseSummary.subResponsesFailedCount + subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -84974,12 +86126,17 @@ class ContainerClient extends StorageClient { const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } pipeline = newPipeline(sharedKeyCredential, options); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if 
(extractedCreds.kind === "SASConnString") { url = @@ -85031,7 +86188,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85051,20 +86208,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-createIfNotExists", options); try { const res = await this.create(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when creating a container only if it does not already exist." + message: "Expected exception when creating a container only if it does not already exist.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85086,7 +86242,7 @@ class ContainerClient extends StorageClient { try { await this.getProperties({ abortSignal: options.abortSignal, - tracingOptions: updatedOptions.tracingOptions + tracingOptions: updatedOptions.tracingOptions, }); return true; } @@ -85094,13 +86250,13 @@ class ContainerClient extends StorageClient { if (e.statusCode === 404) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when checking container existence" + message: "Expected exception when checking container existence", }); return false; } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85174,7 +86330,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85200,7 +86356,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85220,20 +86376,19 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-deleteIfExists", options); try { const res = await this.delete(updatedOptions); - return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response // _response is made non-enumerable - }); + return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: "Expected exception when deleting a container only if it exists." + message: "Expected exception when deleting a container only if it exists.", }); return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? 
void 0 : _b.parsedHeaders), { _response: e.response }); } span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85267,7 +86422,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85303,13 +86458,13 @@ class ContainerClient extends StorageClient { requestId: response.requestId, clientRequestId: response.clientRequestId, signedIdentifiers: [], - version: response.version + version: response.version, }; for (const identifier of response) { let accessPolicy = undefined; if (identifier.accessPolicy) { accessPolicy = { - permissions: identifier.accessPolicy.permissions + permissions: identifier.accessPolicy.permissions, }; if (identifier.accessPolicy.expiresOn) { accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); @@ -85320,7 +86475,7 @@ class ContainerClient extends StorageClient { } res.signedIdentifiers.push({ accessPolicy, - id: identifier.id + id: identifier.id, }); } return res; @@ -85328,7 +86483,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85367,9 +86522,9 @@ class ContainerClient extends StorageClient { permissions: identifier.accessPolicy.permissions, startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) - : "" + : "", }, - id: identifier.id + id: identifier.id, }); } return await this.containerContext.setAccessPolicy(Object.assign({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions }, convertTracingToRequestOptionsBase(updatedOptions))); @@ -85377,7 +86532,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85423,13 +86578,13 @@ class ContainerClient extends StorageClient { const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, - response + response, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85460,7 +86615,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85482,8 +86637,12 @@ class ContainerClient extends StorageClient { const { span, updatedOptions } = createSpan("ContainerClient-listBlobFlatSegment", options); try { const response = await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + const 
wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; @@ -85491,7 +86650,7 @@ class ContainerClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85511,19 +86670,33 @@ class ContainerClient extends StorageClient { * @param options - Options to Container List Blob Hierarchy Segment operation. */ async listBlobHierarchySegment(delimiter, marker, options = {}) { + var _a; const { span, updatedOptions } = createSpan("ContainerClient-listBlobHierarchySegment", options); try { const response = await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), convertTracingToRequestOptionsBase(updatedOptions))); - const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { - const blobItem = Object.assign(Object.assign({}, blobItemInteral), { tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); + response.segment.blobItems = []; + if (response.segment["Blob"] !== undefined) { + response.segment.blobItems = ProcessBlobItems(response.segment["Blob"]); + } + response.segment.blobPrefixes = []; + if (response.segment["BlobPrefix"] !== undefined) { + response.segment.blobPrefixes = ProcessBlobPrefixes(response.segment["BlobPrefix"]); + } + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInteral) => { + const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name), tags: toTags(blobItemInteral.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInteral.objectReplicationMetadata) }); return blobItem; + }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? 
void 0 : _a.map((blobPrefixInternal) => { + const blobPrefix = { + name: BlobNameToString(blobPrefixInternal.name), + }; + return blobPrefix; }) }) }); return wrappedResponse; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -85705,7 +86878,7 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); - } + }, }; } /** @@ -85779,7 +86952,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * } * ``` @@ -85794,7 +86967,7 @@ class ContainerClient extends StorageClient { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { - * console.log(`\tBlobItem: name - ${item.name}, last modified - ${item.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${item.name}`); * } * entity = await iter.next(); * } @@ -85812,7 +86985,7 @@ class ContainerClient extends StorageClient { * } * } * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -85823,7 +86996,9 @@ class ContainerClient extends StorageClient { * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * * let i = 1; - * for await (const response of containerClient.listBlobsByHierarchy("/", { prefix: "prefix2/sub1/"}).byPage({ maxPageSize: 2 })) { + * for await (const response of containerClient + * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) + * .byPage({ maxPageSize: 2 })) { * console.log(`Page ${i++}`); * const segment = response.segment; * @@ -85834,7 +87009,7 @@ class ContainerClient extends StorageClient { * } * * for (const blob of response.segment.blobItems) { - * console.log(`\tBlobItem: name - ${blob.name}, last modified - ${blob.properties.lastModified}`); + * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` @@ -85901,7 +87076,208 @@ class ContainerClient extends StorageClient { */ byPage: (settings = {}) => { return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); + }, + }; + } + /** + * The Filter Blobs operation enables callers to list blobs in the container whose tags + * match a given search expression. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. 
The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { + const { span, updatedOptions } = createSpan("ContainerClient-findBlobsByTagsSegment", options); + try { + const response = await this.containerContext.filterBlobs(Object.assign({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize }, convertTracingToRequestOptionsBase(updatedOptions))); + const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { + var _a; + let tagValue = ""; + if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { + tagValue = blob.tags.blobTagSet[0].value; + } + return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); + }) }); + return wrappedResponse; + } + catch (e) { + span.setStatus({ + code: coreTracing.SpanStatusCode.ERROR, + message: e.message, + }); + throw e; + } + finally { + span.end(); + } + } + /** + * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param marker - A string value that identifies the portion of + * the list of blobs to be returned with the next listing operation. The + * operation returns the continuationToken value within the response body if the + * listing operation did not return all blobs remaining to be listed + * with the current page. The continuationToken value can be used as the value for + * the marker parameter in a subsequent call to request the next page of list + * items. The marker value is opaque to the client. + * @param options - Options to find blobs by tags. + */ + findBlobsByTagsSegments(tagFilterSqlExpression, marker, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1() { + let response; + if (!!marker || marker === undefined) { + do { + response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); + response.blobs = response.blobs || []; + marker = response.continuationToken; + yield yield tslib.__await(response); + } while (marker); } + }); + } + /** + * Returns an AsyncIterableIterator for blobs. + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to findBlobsByTagsItems. 
+ */ + findBlobsByTagsItems(tagFilterSqlExpression, options = {}) { + return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1() { + var e_3, _a; + let marker; + try { + for (var _b = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _c; _c = yield tslib.__await(_b.next()), !_c.done;) { + const segment = _c.value; + yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); + } + } + catch (e_3_1) { e_3 = { error: e_3_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield tslib.__await(_a.call(_b)); + } + finally { if (e_3) throw e_3.error; } + } + }); + } + /** + * Returns an async iterable iterator to find all blobs with specified tag + * under the specified container. + * + * .byPage() returns an async iterable iterator to list the blobs in pages. + * + * Example using `for await` syntax: + * + * ```js + * let i = 1; + * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * ``` + * + * Example using `iter.next()`: + * + * ```js + * let i = 1; + * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); + * let blobItem = await iter.next(); + * while (!blobItem.done) { + * console.log(`Blob ${i++}: ${blobItem.value.name}`); + * blobItem = await iter.next(); + * } + * ``` + * + * Example using `byPage()`: + * + * ```js + * // passing optional maxPageSize in the page settings + * let i = 1; + * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * } + * ``` + * + * Example using paging with a marker: + * + * ```js + * let i = 1; + * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); + * let response = (await iterator.next()).value; + * + * // Prints 2 blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * + * // Gets next marker + * let marker = response.continuationToken; + * // Passing next marker as continuationToken + * iterator = containerClient + * .findBlobsByTags("tagkey='tagvalue'") + * .byPage({ continuationToken: marker, maxPageSize: 10 }); + * response = (await iterator.next()).value; + * + * // Prints blob names + * if (response.blobs) { + * for (const blob of response.blobs) { + * console.log(`Blob ${i++}: ${blob.name}`); + * } + * } + * ``` + * + * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. + * The given expression must evaluate to true for a blob to be returned in the results. + * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; + * however, only a subset of the OData filter syntax is supported in the Blob service. + * @param options - Options to find blobs by tags. 
+ */ + findBlobsByTags(tagFilterSqlExpression, options = {}) { + // AsyncIterableIterator to iterate over blobs + const listSegmentOptions = Object.assign({}, options); + const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); + return { + /** + * The next method, part of the iteration protocol + */ + next() { + return iter.next(); + }, + /** + * The connection to the async iterator, part of the iteration protocol + */ + [Symbol.asyncIterator]() { + return this; + }, + /** + * Return an AsyncIterableIterator that works a page at a time + */ + byPage: (settings = {}) => { + return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); + }, }; } getContainerNameFromUrl() { @@ -86034,6 +87410,10 @@ class AccountSASPermissions { * Permission to set immutability policy. */ this.setImmutabilityPolicy = false; + /** + * Specifies that Permanent Delete is permitted. + */ + this.permanentDelete = false; } /** * Parse initializes the AccountSASPermissions fields from a string. @@ -86080,6 +87460,9 @@ class AccountSASPermissions { case "i": accountSASPermissions.setImmutabilityPolicy = true; break; + case "y": + accountSASPermissions.permanentDelete = true; + break; default: throw new RangeError(`Invalid permission character: ${c}`); } @@ -86130,6 +87513,9 @@ class AccountSASPermissions { if (permissionLike.setImmutabilityPolicy) { accountSASPermissions.setImmutabilityPolicy = true; } + if (permissionLike.permanentDelete) { + accountSASPermissions.permanentDelete = true; + } return accountSASPermissions; } /** @@ -86183,6 +87569,9 @@ class AccountSASPermissions { if (this.setImmutabilityPolicy) { permissions.push("i"); } + if (this.permanentDelete) { + permissions.push("y"); + } return permissions.join(""); } } @@ -86365,6 +87754,11 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } + if (accountSASSignatureValues.permissions && + accountSASSignatureValues.permissions.permanentDelete && + version < "2019-10-10") { + throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); + } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { @@ -86375,25 +87769,48 @@ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyC version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } + if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { + throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); + } const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); - const stringToSign = [ - sharedKeyCredential.accountName, - parsedPermissions, - parsedServices, - parsedResourceTypes, - accountSASSignatureValues.startsOn - ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) - : "", - truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), - accountSASSignatureValues.ipRange ? 
ipRangeToString(accountSASSignatureValues.ipRange) : "", - accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", - version, - "" // Account SAS requires an additional newline character - ].join("\n"); + let stringToSign; + if (version >= "2020-12-06") { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", + "", // Account SAS requires an additional newline character + ].join("\n"); + } + else { + stringToSign = [ + sharedKeyCredential.accountName, + parsedPermissions, + parsedServices, + parsedResourceTypes, + accountSASSignatureValues.startsOn + ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) + : "", + truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), + accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", + accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", + version, + "", // Account SAS requires an additional newline character + ].join("\n"); + } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); - return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange); + return new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope); } /** @@ -86440,12 +87857,17 @@ class BlobServiceClient extends StorageClient { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { - { + if (coreHttp.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); - options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + if (!options.proxyOptions) { + options.proxyOptions = coreHttp.getDefaultProxySettings(extractedCreds.proxyUri); + } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } + else { + throw new Error("Account connection string is only supported in Node.js environment"); + } } else if (extractedCreds.kind === "SASConnString") { const pipeline = newPipeline(new AnonymousCredential(), options); @@ -86484,13 +87906,13 @@ class BlobServiceClient extends StorageClient { const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, - containerCreateResponse + containerCreateResponse, }; } catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ 
-86514,7 +87936,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86544,7 +87966,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86574,7 +87996,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86598,7 +88020,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86623,7 +88045,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86648,7 +88070,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86674,7 +88096,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86704,7 +88126,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86747,7 +88169,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -86913,7 +88335,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -87051,6 +88473,9 @@ class BlobServiceClient extends StorageClient { if (options.includeMetadata) { include.push("metadata"); } + if (options.includeSystem) { + include.push("system"); + } // AsyncIterableIterator to iterate over containers const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include } : {})); const iter = this.listItems(listSegmentOptions); @@ -87072,7 +88497,7 @@ class BlobServiceClient extends StorageClient { */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); - } + }, }; } /** @@ -87091,7 +88516,7 @@ class BlobServiceClient extends StorageClient { try { const response = await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), - expiresOn: truncatedISO8061Date(expiresOn, false) + expiresOn: truncatedISO8061Date(expiresOn, false), }, Object.assign({ abortSignal: options.abortSignal }, convertTracingToRequestOptionsBase(updatedOptions))); const userDelegationKey = { signedObjectId: response.signedObjectId, @@ -87100,7 +88525,7 @@ class BlobServiceClient extends StorageClient { signedExpiresOn: new Date(response.signedExpiresOn), signedService: response.signedService, signedVersion: response.signedVersion, - value: response.value + value: response.value, }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; @@ -87108,7 +88533,7 @@ class BlobServiceClient extends StorageClient { catch (e) { span.setStatus({ code: coreTracing.SpanStatusCode.ERROR, - message: e.message + message: e.message, }); throw e; } @@ -87157,39 +88582,27 @@ class BlobServiceClient extends StorageClient { Object.defineProperty(exports, 'BaseRequestPolicy', { enumerable: true, - get: function () { - return coreHttp.BaseRequestPolicy; - } + get: function () { return coreHttp.BaseRequestPolicy; } }); Object.defineProperty(exports, 'HttpHeaders', { enumerable: true, - get: function () { - return coreHttp.HttpHeaders; - } + get: function () { return coreHttp.HttpHeaders; } }); Object.defineProperty(exports, 'RequestPolicyOptions', { enumerable: true, - get: function () { - return coreHttp.RequestPolicyOptions; - } + get: function () { return coreHttp.RequestPolicyOptions; } }); Object.defineProperty(exports, 'RestError', { enumerable: true, - get: function () { - return coreHttp.RestError; - } + get: function () { return coreHttp.RestError; } }); Object.defineProperty(exports, 'WebResource', { enumerable: true, - get: function () { - return coreHttp.WebResource; - } + get: function () { return coreHttp.WebResource; } }); Object.defineProperty(exports, 'deserializationPolicy', { enumerable: true, - get: function () { - return coreHttp.deserializationPolicy; - } + get: function () { return coreHttp.deserializationPolicy; } }); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; @@ -92200,7 +93613,7 @@ exports.deleteKey = deleteKey; processors = __webpack_require__(703); - setImmediate = __webpack_require__(343).setImmediate; + setImmediate = __webpack_require__(213).setImmediate; defaults = __webpack_require__(791).defaults; @@ -93127,7 +94540,8 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry return { statusCode: error.statusCode, result: null, - headers: {} + headers: {}, + error }; } else { @@ -94697,7 +96111,7 @@ module.exports = inc * POSSIBILITY OF SUCH DAMAGE. 
*/ -const punycode = __webpack_require__(213); +const punycode = __webpack_require__(815); const urlParse = __webpack_require__(835).parse; const util = __webpack_require__(669); const pubsuffix = __webpack_require__(562); @@ -97514,7 +98928,7 @@ WebIDLAlgorithm_1.idl_defineConst(NodeImpl.prototype, "DOCUMENT_POSITION_IMPLEME "use strict"; -const punycode = __webpack_require__(213); +const punycode = __webpack_require__(815); const tr46 = __webpack_require__(361); const specialSchemes = { From 45b7b136cfb310c1722227968931d6e69ac80d90 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mario=20Sch=C3=BCnadel?= Date: Mon, 25 Apr 2022 13:40:12 +0200 Subject: [PATCH 3/3] only Versions.kt and Dependencies.kt as cache key --- README.md | 2 +- __tests__/cache.test.ts | 2 +- dist/cleanup/index.js | 7 ++++++- dist/setup/index.js | 7 ++++++- src/cache.ts | 7 ++++++- 5 files changed, 20 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index cfd4163..cc9760b 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ Currently, the following distributions are supported: ### Caching packages dependencies The action has a built-in functionality for caching and restoring dependencies. It uses [actions/cache](https://github.com/actions/cache) under hood for caching dependencies but requires less configuration settings. Supported package managers are gradle and maven. The format of the used cache key is `setup-java-${{ platform }}-${{ packageManager }}-${{ fileHash }}`, where the hash is based on the following files: -- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/*.kt` +- gradle: `**/*.gradle*`, `**/gradle-wrapper.properties`, `buildSrc/**/Versions.kt`, `buildSrc/**/Dependencies.kt` - maven: `**/pom.xml` - sbt: `**/build.sbt` diff --git a/__tests__/cache.test.ts b/__tests__/cache.test.ts index bb7ead8..f7f151b 100644 --- a/__tests__/cache.test.ts +++ b/__tests__/cache.test.ts @@ -98,7 +98,7 @@ describe('dependency cache', () => { await expect(restore('gradle')).rejects.toThrowError( `No file in ${projectRoot( workspace - )} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/*.kt], make sure you have checked out the target repository` + )} matched to [**/*.gradle*,**/gradle-wrapper.properties,buildSrc/**/Versions.kt,buildSrc/**/Dependencies.kt], make sure you have checked out the target repository` ); }); it('downloads cache based on build.gradle', async () => { diff --git a/dist/cleanup/index.js b/dist/cleanup/index.js index ca0dd27..52330db 100644 --- a/dist/cleanup/index.js +++ b/dist/cleanup/index.js @@ -63299,7 +63299,12 @@ const supportedPackageManager = [ id: 'gradle', path: [path_1.join(os_1.default.homedir(), '.gradle', 'caches'), path_1.join(os_1.default.homedir(), '.gradle', 'wrapper')], // https://github.com/actions/cache/blob/0638051e9af2c23d10bb70fa9beffcad6cff9ce3/examples.md#java---gradle - pattern: ['**/*.gradle*', '**/gradle-wrapper.properties', 'buildSrc/**/*.kt'] + pattern: [ + '**/*.gradle*', + '**/gradle-wrapper.properties', + 'buildSrc/**/Versions.kt', + 'buildSrc/**/Dependencies.kt' + ] }, { id: 'sbt', diff --git a/dist/setup/index.js b/dist/setup/index.js index 0cd24a4..0f3e330 100644 --- a/dist/setup/index.js +++ b/dist/setup/index.js @@ -18631,7 +18631,12 @@ const supportedPackageManager = [ id: 'gradle', path: [path_1.join(os_1.default.homedir(), '.gradle', 'caches'), path_1.join(os_1.default.homedir(), '.gradle', 'wrapper')], // https://github.com/actions/cache/blob/0638051e9af2c23d10bb70fa9beffcad6cff9ce3/examples.md#java---gradle - 
pattern: ['**/*.gradle*', '**/gradle-wrapper.properties', 'buildSrc/**/*.kt'] + pattern: [ + '**/*.gradle*', + '**/gradle-wrapper.properties', + 'buildSrc/**/Versions.kt', + 'buildSrc/**/Dependencies.kt' + ] }, { id: 'sbt', diff --git a/src/cache.ts b/src/cache.ts index 75186c6..08c8550 100644 --- a/src/cache.ts +++ b/src/cache.ts @@ -31,7 +31,12 @@ const supportedPackageManager: PackageManager[] = [ id: 'gradle', path: [join(os.homedir(), '.gradle', 'caches'), join(os.homedir(), '.gradle', 'wrapper')], // https://github.com/actions/cache/blob/0638051e9af2c23d10bb70fa9beffcad6cff9ce3/examples.md#java---gradle - pattern: ['**/*.gradle*', '**/gradle-wrapper.properties', 'buildSrc/**/*.kt'] + pattern: [ + '**/*.gradle*', + '**/gradle-wrapper.properties', + 'buildSrc/**/Versions.kt', + 'buildSrc/**/Dependencies.kt' + ] }, { id: 'sbt',
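
For reference, a minimal sketch of how the gradle glob patterns introduced above could feed the `fileHash` part of the cache key documented in the README (`setup-java-${{ platform }}-${{ packageManager }}-${{ fileHash }}`). This is illustrative only: the `computeGradleCacheKey` helper is hypothetical, and the use of `hashFiles` from `@actions/glob` is an assumption — the action's actual hashing call is not shown in these hunks.

```typescript
// Illustrative sketch only -- not part of this patch.
// Assumptions: @actions/glob's hashFiles is used to hash the matched files,
// and computeGradleCacheKey is a hypothetical helper for demonstration.
import * as glob from '@actions/glob';
import * as os from 'os';

const gradlePatterns = [
  '**/*.gradle*',
  '**/gradle-wrapper.properties',
  'buildSrc/**/Versions.kt',
  'buildSrc/**/Dependencies.kt'
];

async function computeGradleCacheKey(): Promise<string> {
  // hashFiles hashes every file matched by the newline-separated patterns,
  // so editing buildSrc/Versions.kt or buildSrc/Dependencies.kt produces a
  // new key, while other buildSrc Kotlin sources no longer do.
  const fileHash = await glob.hashFiles(gradlePatterns.join('\n'));
  return `setup-java-${os.platform()}-gradle-${fileHash}`;
}
```

Narrowing the pattern from `buildSrc/**/*.kt` to only `Versions.kt` and `Dependencies.kt` keeps the cache keyed on the files that conventionally pin dependency versions, while changes to unrelated buildSrc Kotlin sources (convention plugins, build helpers) no longer invalidate the dependency cache.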