diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 000000000..2ac7ccbc5 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "test/azure-storage-node"] + path = externaltests/azure-storage-node + url = https://github.com/Azure/azure-storage-node diff --git a/.jshintignore b/.jshintignore new file mode 100644 index 000000000..65880a4cc --- /dev/null +++ b/.jshintignore @@ -0,0 +1,3 @@ +node_modules/* +packages +.git \ No newline at end of file diff --git a/.jshintrc b/.jshintrc new file mode 100644 index 000000000..cee4f5639 --- /dev/null +++ b/.jshintrc @@ -0,0 +1,21 @@ +{ + "esversion": 6, + "bitwise": true, + "curly": false, + "eqeqeq": false, + "forin": true, + "latedef": true, + "maxparams": false, + "maxdepth": false, + "maxstatements": false, + "maxcomplexity": false, + "noarg": true, + "node": true, + "nonew": true, + "plusplus": false, + "strict": false, + "trailingcomma": true, + "undef": true, + "unused": true, + "shadow": "inner" + } \ No newline at end of file diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 000000000..1437a239e --- /dev/null +++ b/.prettierignore @@ -0,0 +1,7 @@ +package.json +package-lock.json +azurite-testdrive/ +externaltests/azure-storage-node/ +node_modules/ +.vscode/ +nuget/ diff --git a/.prettierrc.json b/.prettierrc.json new file mode 100644 index 000000000..6232ec461 --- /dev/null +++ b/.prettierrc.json @@ -0,0 +1,14 @@ +{ + "printWidth": 80, + "parser": "typescript", + "arrowParens" : "always", + "tabWidth": 2, + "useTabs":false, + "semi":true, + "singleQuote": false, + "trailingComma": "es5", + "bracketSpacing": true, + "requirePragma": false, + "insertPragma": true, + "proseWrap": "never" +} diff --git a/Dockerfile b/Dockerfile old mode 100644 new mode 100755 index 8ae7d364a..22db5c0e0 --- a/Dockerfile +++ b/Dockerfile @@ -1,12 +1,13 @@ -FROM node:alpine +FROM node:10.6-alpine WORKDIR /opt/azurite -COPY package.json /opt/azurite +COPY package.json package-lock.json 
/opt/azurite/ RUN npm install COPY bin /opt/azurite/bin COPY lib /opt/azurite/lib +COPY test /opt/azurite/test VOLUME /opt/azurite/folder diff --git a/README.md b/README.md index b681da1bc..7e7c3fb22 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,9 @@ After installation you can install Azurite simply with `npm` which is Node.js pa `$ npm install -g azurite` -Then simply start it with the following command: +## Running Azurite + +Simply start it with the following command: `$ azurite -l path/to/localfolder` @@ -32,6 +34,7 @@ For Table Storage Emulator only: `$ azurite-table -l path/to/azurite/workspace` + ## Nuget Azurite is also available as Nuget package at [https://www.nuget.org/packages/Azurite/](https://www.nuget.org/packages/Azurite/). You can install it via the [Package Manager Console](https://docs.nuget.org/docs/start-here/using-the-package-manager-console) with the following command: @@ -90,6 +93,31 @@ $ az storage container create --name 'test' --connection-string 'DefaultEndpoint } ``` +## Current List of Command Line Options + +``` +-a +``` +Enables sharedkey authentication check +``` +-l c:\tmp\emulatorPath +--location c:\tmp\emulatorPath +``` +Allows the specification of a path +``` +--blobPort 101000 +``` +Sets the TCP Port for blob storage to the value following the argument. +``` +--queuePort 10001 +``` +Sets the TCP Port for queue storage to the value following the argument. +``` +--tablePort 10002 +``` +Sets the TCP Port for table storage to the value following the argument. + + # Contributions ## What do I need to know to help? @@ -101,10 +129,16 @@ If you are interested in making a code contribution and would like to learn more - Azurite makes heavy use of [Bluebird](http://bluebirdjs.com/docs/getting-started.html) which is a fully featured promises library with unmatched performance. ## What TODOs are there? 
-The current status of Azurite's support of the [Official Blob Storage REST API Specification](https://docs.microsoft.com/rest/api/storageservices/blob-service-rest-api) is listed in below section [API Support](https://github.com/arafato/azurite/#api-support). Features that are still unimplemented are marked with `[TODO]`. Features that are currently being worked on are marked with `[IN-PROGRESS]`. +We are using a combination of community feedback, and the Azure Storage Node package tests to validate Azurite's support of the [Official Blob Storage REST API Specification](https://docs.microsoft.com/rest/api/storageservices/blob-service-rest-api). +We shall create issues based on failing tests to help direct and prioritize our development efforts. +See also section below: [API Support](https://github.com/Azure/Azurite/#api-support). Current bugs that need to be fixed are listed at our [issues site on Github](https://github.com/Azure/Azurite/issues) and tagged with a red label `bug`. +Issues which we think might be a good place for newcomers to start, are tagged with [**"good first issue"**](https://github.com/Azure/Azurite/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22). + +Details on how to setup tests with the Azure Storage submodule, can be found under [Testing with Azure Storage Node](./doc/azure-storage-node_tdd.md). + ## Need Help? Be sure to check out the Microsoft Azure Developer Forums on MSDN or the Developer Forums on Stack Overflow if you have trouble with the provided code. @@ -115,6 +149,8 @@ If you encounter any bugs with the library please file an issue in the [Issues]( When sending pull requests, please send **non-breaking PRs** to the dev branch and breaking changes to the **dev_breaking** branch. Please do not make PRs against master. 
+- **Please include a Unit or Integration test with any code submission, this is a significant help when validating changes and helps reduce the time we need to spend on pull requests.** + ## Where can I go for help? If you need help, you can ask questions directly at our [issues site on Github](https://github.com/Azure/Azurite/issues). @@ -125,7 +161,8 @@ Alternatively, check out the following links: [Azure Storage Team Blog](https://blogs.msdn.com/b/windowsazurestorage/) # API Support -Currently, Azurite only supports the [Blob Storage APIs](https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/blob-service-rest-api), the [Queue Storage API](https://docs.microsoft.com/en-us/rest/api/storageservices/queue-service-rest-api), and the [Table Storage API](https://docs.microsoft.com/en-us/rest/api/storageservices/table-service-rest-api). Support for Azure Storage Files is planned, but currently not available. +Currently, Azurite only supports the [Blob Storage APIs](https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/blob-service-rest-api), the [Queue Storage API](https://docs.microsoft.com/en-us/rest/api/storageservices/queue-service-rest-api), and the [Table Storage API](https://docs.microsoft.com/en-us/rest/api/storageservices/table-service-rest-api). +Support for Azure Storage Files is planned, but currently not available. The Standard Emulator Connection String is the same as required by [Microsoft's Official Storage Emulator](https://go.microsoft.com/fwlink/?LinkId=717179): diff --git a/bin/azurite b/bin/azurite index a74b70008..bf833781c 100755 --- a/bin/azurite +++ b/bin/azurite @@ -1,59 +1,67 @@ #!/usr/bin/env node -'use strict'; +"use strict"; -const BbPromise = require('bluebird'); +const BbPromise = require("bluebird"); -process.on('unhandledRejection', (e) => { - console.error('**PANIC** Something unexpected happened! 
Emulator may be in an inconsistent state!'); - console.error(e); +process.on("unhandledRejection", (e) => { + console.error( + "**PANIC** Something unexpected happened! Emulator may be in an inconsistent state!" + ); + console.error(e); }); process.noDeprecation = true; -(() => BbPromise.resolve().then(() => { - // requiring here so that if anything went wrong, - // during require, it will be caught. - const argv = require('minimist')(process.argv.slice(2)), - env = require('./../lib/core/env'), - cli = require('./../lib/core/cli'); +(() => + BbPromise.resolve() + .then(() => { + // requiring here so that if anything went wrong, + // during require, it will be caught. + const argv = require("minimist")(process.argv.slice(2)), + env = require("./../lib/core/env"), + cli = require("./../lib/core/cli"); - return env.init(argv) + return env + .init(argv) .then(() => { - if (!env.silent) { - cli.asciiGreeting(); - } + if (!env.silent) { + cli.asciiGreeting(); + } }) .then(() => { - // Forking individual modules to spread them across different cores if possible - // and restarting them automatically in case of a crash. - const fork = require('child_process').fork; - - (function forkBlobModule(code, signal) { - const mod = fork(env.blobModulePath, process.argv); - mod.on('exit', forkBlobModule); - })(); - (function forkQueueModule(code, signal) { - const mod = fork(env.queueModulePath, process.argv); - mod.on('exit', forkQueueModule); - })(); - (function forkTableModule(code, signal) { - const mod = fork(env.tableModulePath, process.argv); - mod.on('exit', forkTableModule); - })(); + // Forking individual modules to spread them across different cores if possible + // and restarting them automatically in case of a crash. 
+ const fork = require("child_process").fork; + + (function forkBlobModule(code, signal) { + const mod = fork(env.blobModulePath, process.argv); + mod.on("exit", forkBlobModule); + })(); + (function forkQueueModule(code, signal) { + const mod = fork(env.queueModulePath, process.argv); + mod.on("exit", forkQueueModule); + })(); + (function forkTableModule(code, signal) { + const mod = fork(env.tableModulePath, process.argv); + mod.on("exit", forkTableModule); + })(); }); -}).catch(e => { - process.exitCode = 1; - console.error(e); -}))(); + }) + .catch((e) => { + process.exitCode = 1; + console.error(e); + }))(); -// If this is a child process (e.g. forked by NPM through '$ npm start') we are propagating the signals from the +// If this is a child process (e.g. forked by NPM through '$ npm start') we are propagating the signals from the // parent (i.e. NPM) to exit from this process and its child processes. -process.on('SIGINT', () => { // e.g. STRG+C - process.exitCode = 1; - process.exit(); +process.on("SIGINT", () => { + // e.g. STRG+C + process.exitCode = 1; + process.exit(); }); -process.on('SIGTERM', () => { // e.g. end process from taskmanager - process.exitCode = 1; - process.exit(); -}); \ No newline at end of file +process.on("SIGTERM", () => { + // e.g. end process from taskmanager + process.exitCode = 1; + process.exit(); +}); diff --git a/bin/blob b/bin/blob index 3038b3a29..f86218d2c 100644 --- a/bin/blob +++ b/bin/blob @@ -1,23 +1,28 @@ #!/usr/bin/env node -'use strict'; +"use strict"; -const BbPromise = require('bluebird'); +const BbPromise = require("bluebird"); -process.on('unhandledRejection', (e) => { - console.error('**PANIC** Something unexpected happened! Blob Storage Emulator may be in an inconsistent state!'); - console.error(e); +process.on("unhandledRejection", (e) => { + console.error( + "**PANIC** Something unexpected happened! Blob Storage Emulator may be in an inconsistent state!" 
+ ); + console.error(e); }); process.noDeprecation = true; -(() => BbPromise.resolve().then(() => { - // requiring here so that if anything went wrong, - // during require, it will be caught. - const argv = require('minimist')(process.argv.slice(2)); - const A = require('../lib/AzuriteBlob'), - azurite = new A(); - azurite.init(argv); -}).catch(e => { - process.exitCode = 1; - console.error(e); -}))(); \ No newline at end of file +(() => + BbPromise.resolve() + .then(() => { + // requiring here so that if anything went wrong, + // during require, it will be caught. + const argv = require("minimist")(process.argv.slice(2)); + const A = require("../lib/AzuriteBlob"), + azurite = new A(); + azurite.init(argv); + }) + .catch((e) => { + process.exitCode = 1; + console.error(e); + }))(); diff --git a/bin/queue b/bin/queue index 36d9977d1..c69d7e695 100644 --- a/bin/queue +++ b/bin/queue @@ -1,23 +1,28 @@ #!/usr/bin/env node -'use strict'; +"use strict"; -const BbPromise = require('bluebird'); +const BbPromise = require("bluebird"); -process.on('unhandledRejection', (e) => { - console.error('**PANIC** Something unexpected happened! Queue Storage Emulator may be in an inconsistent state!'); - console.error(e); +process.on("unhandledRejection", (e) => { + console.error( + "**PANIC** Something unexpected happened! Queue Storage Emulator may be in an inconsistent state!" + ); + console.error(e); }); process.noDeprecation = true; -(() => BbPromise.resolve().then(() => { - // requiring here so that if anything went wrong, - // during require, it will be caught. - const argv = require('minimist')(process.argv.slice(2)); - const A = require('../lib/AzuriteQueue'), - azurite = new A(); - azurite.init(argv); -}).catch(e => { - process.exitCode = 1; - console.error(e); -}))(); \ No newline at end of file +(() => + BbPromise.resolve() + .then(() => { + // requiring here so that if anything went wrong, + // during require, it will be caught. 
+ const argv = require("minimist")(process.argv.slice(2)); + const A = require("../lib/AzuriteQueue"), + azurite = new A(); + azurite.init(argv); + }) + .catch((e) => { + process.exitCode = 1; + console.error(e); + }))(); diff --git a/bin/table b/bin/table index 118ab2760..48b5ecbea 100644 --- a/bin/table +++ b/bin/table @@ -1,23 +1,28 @@ #!/usr/bin/env node -'use strict'; +"use strict"; -const BbPromise = require('bluebird'); +const BbPromise = require("bluebird"); -process.on('unhandledRejection', (e) => { - console.error('**PANIC** Something unexpected happened! Table Storage Emulator may be in an inconsistent state!'); - console.error(e); +process.on("unhandledRejection", (e) => { + console.error( + "**PANIC** Something unexpected happened! Table Storage Emulator may be in an inconsistent state!" + ); + console.error(e); }); process.noDeprecation = true; -(() => BbPromise.resolve().then(() => { - // requiring here so that if anything went wrong, - // during require, it will be caught. - const argv = require('minimist')(process.argv.slice(2)); - const A = require('../lib/AzuriteTable'), - azurite = new A(); - azurite.init(argv); -}).catch(e => { - process.exitCode = 1; - console.error(e); -}))(); \ No newline at end of file +(() => + BbPromise.resolve() + .then(() => { + // requiring here so that if anything went wrong, + // during require, it will be caught. 
+ const argv = require("minimist")(process.argv.slice(2)); + const A = require("../lib/AzuriteTable"), + azurite = new A(); + azurite.init(argv); + }) + .catch((e) => { + process.exitCode = 1; + console.error(e); + }))(); diff --git a/doc/azure-storage-node_tdd.md b/doc/azure-storage-node_tdd.md new file mode 100644 index 000000000..59797f947 --- /dev/null +++ b/doc/azure-storage-node_tdd.md @@ -0,0 +1,67 @@ +# Testing with Azure Storage Node + +We have added the Azure Storage Node package as a submodule under ./externaltests + +To run these tests, you need to open your git console and run the following command in your Azurite repo: + +```shell +git submodule update --init +``` + +Once the submodule has been cloned, you need to change to the **./externaltests/azure-storage-node** folder and run + +```shell +npm install +``` + +This will install the dependencies, and allow you to run the tests. + +To debug test cases, you can use the following addition to your VS Code launch.json: + +```json + { + "type": "node", + "request": "launch", + "env": { + "AZURITE_LOCATION" :"azurite-storage-testdrive", + "NOCK_OFF": "true", + "AZURE_STORAGE_CONNECTION_STRING": "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;", + "AZURE_STORAGE_CONNECTION_STRING_BLOB_ACCOUNT" : "DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;" + }, + "name": "Azure Storage Tests", + "program": "${workspaceRoot}/node_modules/mocha/bin/_mocha", + "args": [ + "-u", + "tdd", + "--timeout", + "999999", + "--colors", + "${workspaceRoot}/externaltests" + ], + "internalConsoleOptions": "openOnSessionStart", + "protocol": "inspector" + }, +``` + +The tests are currently run by 
./externaltests/azure-storage-shim.js , and we are looking at making this more comfortable for developers at the early stages of feature implementation. +Currently, we are just commenting out those test scripts which we do not want to run. +i.e: + +```javascript +describe('azure-storage-node tests', () => { + const azurite = new Azurite(); + before(() => { + const location = path.join(process.env.AZURITE_LOCATION, 'AZURE-STORAGE'); + return azurite.init({ l: location, silent: 'true', overwrite: 'true' }); + }); + + //requireTestDir(''); + // Currently runs azure-storage tests individually, until we implement a playlist definition + // require('./azure-storage-node/test/services/blob/blobservice-archive-tests'); + require('./azure-storage-node/test/services/blob/blobservice-container-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-lease-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-sse-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-uploaddownload-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-uploaddownload-scale-tests'); +``` diff --git a/externaltests/.env b/externaltests/.env new file mode 100644 index 000000000..02b624219 --- /dev/null +++ b/externaltests/.env @@ -0,0 +1,3 @@ +AZURITE_LOCATION=azurite-testdrive +NOCK_OFF=true +AZURE_STORAGE_CONNECTION_STRING=DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1; diff --git a/externaltests/azure-storage-node b/externaltests/azure-storage-node new file mode 160000 index 000000000..d7b7e16f7 --- /dev/null +++ b/externaltests/azure-storage-node @@ -0,0 +1 @@ +Subproject commit d7b7e16f7d7e08852f4fa1fb477acbdf0cc281d7 diff --git a/externaltests/azure-storage-shim.js 
b/externaltests/azure-storage-shim.js new file mode 100644 index 000000000..f48916877 --- /dev/null +++ b/externaltests/azure-storage-shim.js @@ -0,0 +1,46 @@ +/** + * NOTE: the shim requires the following environment variables to be set to work: + * NOCK_OFF=true + * AZURE_STORAGE_CONNECTION_STRING=DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1; + * this is automatically handled via env-cmd in the package.json for npm run test + * + * @format + */ + +const fs = require("fs"), + path = require("path"), + Azurite = require("./../lib/AzuriteBlob"); + +function requireTestDir(dir) { + fs.readdirSync("./externaltests/azure-storage-node/test" + dir).map( + (file) => { + if (file.endsWith(".js")) + require("./azure-storage-node/test" + + dir + + path.sep + + file.slice(0, -3)); + } + ); +} + +describe("azure-storage-node tests", () => { + const azurite = new Azurite(); + before(() => { + const location = path.join(process.env.AZURITE_LOCATION, "AZURE-STORAGE"); + return azurite.init({ l: location, silent: "true", overwrite: "true" }); + }); + + //requireTestDir(''); + // Currently runs azure-storage tests individually, until we implement a playlist definition + // require('./azure-storage-node/test/services/blob/blobservice-archive-tests'); + require("./azure-storage-node/test/services/blob/blobservice-container-tests"); + // require('./azure-storage-node/test/services/blob/blobservice-lease-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-sse-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-uploaddownload-tests'); + // require('./azure-storage-node/test/services/blob/blobservice-uploaddownload-scale-tests'); + + after(() => { + return azurite.close(); + }); +}); diff --git 
a/lib/AzuriteBlob.js b/lib/AzuriteBlob.js index aa5de4f12..fad9d72a3 100644 --- a/lib/AzuriteBlob.js +++ b/lib/AzuriteBlob.js @@ -1,87 +1,95 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - express = require('express'), - bodyParser = require('body-parser'), - env = require('./core/env'), - storageManager = require('./core/blob/StorageManager'), - morgan = require('morgan'), - cli = require('./core/cli'); +"use strict"; + +const BbPromise = require("bluebird"), + express = require("express"), + bodyParser = require("body-parser"), + env = require("./core/env"), + storageManager = require("./core/blob/StorageManager"), + morgan = require("morgan"), + cli = require("./core/cli"); class AzuriteBlob { - constructor() { - this.server; - // Support for PM2 Graceful Shutdown on Windows and Linux/OSX - // See http://pm2.keymetrics.io/docs/usage/signals-clean-restart/ - if (process.platform === 'win32') { - process.on('message', function (msg) { - if (msg == 'shutdown') { - this.close(); - } - }); - } - else { - process.on('SIGINT', function () { - this.close(); - }); + constructor() { + this.server; + // Support for PM2 Graceful Shutdown on Windows and Linux/OSX + // See http://pm2.keymetrics.io/docs/usage/signals-clean-restart/ + if (process.platform === "win32") { + process.on("message", function(msg) { + if (msg == "shutdown") { + this.close(); } + }); + } else { + process.on("SIGINT", function() { + this.close(); + }); } + } - init(options) { - return env.init(options) - .then(() => { - return storageManager.init() - }) - .then(() => { - const app = express(); - if (!env.silent) { - app.use(morgan('dev')); - } - // According to RFC 7231: - // An origin server MAY respond with a status code of 415 (Unsupported - // Media Type) if a representation in the request message has a content - // coding that is not acceptable. - // body-parser, however, throws an error. We thus ignore unsupported content encodings and treat them as 'identity'. 
- app.use((req, res, next) => { - const encoding = (req.headers['content-encoding'] || 'identity').toLowerCase(); - if (encoding !== 'deflate' || - encoding !== 'gzip' || - encoding !== 'identity') { - delete req.headers['content-encoding']; - } - next(); - }) - app.use(bodyParser.raw({ - inflate: true, - limit: '268435kb', // Maximum size of a single PUT Blob operation as per spec. - type: function (type) { - return true; - } - })); - app.use(`/blobs`, express.static(env.localStoragePath)); - require('./routes/blob/AccountRoute')(app); - require('./routes/blob/ContainerRoute')(app); - require('./routes/blob/BlobRoute')(app); - require('./routes/blob/NotFoundRoute')(app); - app.use(require('./middleware/blob/cors')); - app.use(require('./middleware/blob/authentication')); - app.use(require('./middleware/blob/validation')); - app.use(require('./middleware/blob/actions')); - this.server = app.listen(env.blobStoragePort, () => { - if (!env.silent) { - cli.blobStorageStatus(); - } - }); - }); - } - - close() { - return BbPromise.try(() => { - this.server.close(); - storageManager.flush(); - return storageManager.close(); + init(options) { + return env + .init(options) + .then(() => { + return storageManager.init(); + }) + .then(() => { + const app = express(); + if (!env.silent) { + app.use(morgan("dev")); + } + // According to RFC 7231: + // An origin server MAY respond with a status code of 415 (Unsupported + // Media Type) if a representation in the request message has a content + // coding that is not acceptable. + // body-parser, however, throws an error. We thus ignore unsupported content encodings and treat them as 'identity'. 
+ app.use((req, res, next) => { + const encoding = ( + req.headers["content-encoding"] || "identity" + ).toLowerCase(); + if ( + encoding !== "deflate" || + encoding !== "gzip" || + encoding !== "identity" + ) { + delete req.headers["content-encoding"]; + } + next(); }); - } + app.use( + bodyParser.raw({ + inflate: true, + limit: "268435kb", // Maximum size of a single PUT Blob operation as per spec. + type: function(type) { + return true; + }, + }) + ); + app.use(`/blobs`, express.static(env.localStoragePath)); + require("./routes/blob/AccountRoute")(app); + require("./routes/blob/ContainerRoute")(app); + require("./routes/blob/BlobRoute")(app); + require("./routes/blob/NotFoundRoute")(app); + app.use(require("./middleware/blob/cors")); + app.use(require("./middleware/blob/authentication")); + app.use(require("./middleware/blob/validation")); + app.use(require("./middleware/blob/actions")); + this.server = app.listen(env.blobStoragePort, () => { + if (!env.silent) { + cli.blobStorageStatus(); + } + }); + }); + } + + close() { + return BbPromise.try(() => { + this.server.close(); + storageManager.flush(); + return storageManager.close(); + }); + } } module.exports = AzuriteBlob; diff --git a/lib/AzuriteQueue.js b/lib/AzuriteQueue.js index 8de10e198..5d8144439 100644 --- a/lib/AzuriteQueue.js +++ b/lib/AzuriteQueue.js @@ -1,63 +1,65 @@ -'use strict'; +/** @format */ -const express = require('express'), - env = require('./core/env'), - bodyParser = require('body-parser'), - morgan = require('morgan'), - cli = require('./core/cli'), - BbPromise = require('bluebird'); +"use strict"; + +const express = require("express"), + env = require("./core/env"), + bodyParser = require("body-parser"), + morgan = require("morgan"), + cli = require("./core/cli"), + BbPromise = require("bluebird"); class AzuriteQueue { - constructor() { - this.server; - // Support for PM2 Graceful Shutdown on Windows and Linux/OSX - // See http://pm2.keymetrics.io/docs/usage/signals-clean-restart/ - 
if (process.platform === 'win32') { - process.on('message', function (msg) { - if (msg == 'shutdown') { - this.close(); - } - }); - } - else { - process.on('SIGINT', function () { - this.close(); - }); + constructor() { + this.server; + // Support for PM2 Graceful Shutdown on Windows and Linux/OSX + // See http://pm2.keymetrics.io/docs/usage/signals-clean-restart/ + if (process.platform === "win32") { + process.on("message", function(msg) { + if (msg == "shutdown") { + this.close(); } + }); + } else { + process.on("SIGINT", function() { + this.close(); + }); } + } - init(options) { - return env.init(options) - .then(() => { - const app = express(); - if (!env.silent) { - app.use(morgan('dev')); - } - app.use(bodyParser.raw({ - inflate: true, - limit: '10000kb', - type: function (type) { - return true; - } - })); - require('./routes/queue/AccountRoute')(app); - require('./routes/queue/QueueRoute')(app); - require('./routes/queue/MessageRoute')(app); - app.use(require('./middleware/queue/validation')); - app.use(require('./middleware/queue/actions')); - this.server = app.listen(env.queueStoragePort, () => { - if (!env.silent) { - cli.queueStorageStatus(); - } - }); - }); - } + init(options) { + return env.init(options).then(() => { + const app = express(); + if (!env.silent) { + app.use(morgan("dev")); + } + app.use( + bodyParser.raw({ + inflate: true, + limit: "10000kb", + type: function(type) { + return true; + }, + }) + ); + require("./routes/queue/AccountRoute")(app); + require("./routes/queue/QueueRoute")(app); + require("./routes/queue/MessageRoute")(app); + app.use(require("./middleware/queue/validation")); + app.use(require("./middleware/queue/actions")); + this.server = app.listen(env.queueStoragePort, () => { + if (!env.silent) { + cli.queueStorageStatus(); + } + }); + }); + } - close() { - return BbPromise.try(() => { - this.server.close(); - }); - } + close() { + return BbPromise.try(() => { + this.server.close(); + }); + } } -module.exports = 
AzuriteQueue; \ No newline at end of file +module.exports = AzuriteQueue; diff --git a/lib/AzuriteTable.js b/lib/AzuriteTable.js index f806d26fa..fbf388f22 100644 --- a/lib/AzuriteTable.js +++ b/lib/AzuriteTable.js @@ -1,67 +1,74 @@ -'use strict'; +/** @format */ -const express = require('express'), - env = require('./core/env'), - bodyParser = require('body-parser'), - morgan = require('morgan'), - tableStorageManager = require('./core/table/TableStorageManager'), - cli = require('./core/cli'); +"use strict"; + +const express = require("express"), + env = require("./core/env"), + bodyParser = require("body-parser"), + morgan = require("morgan"), + tableStorageManager = require("./core/table/TableStorageManager"), + cli = require("./core/cli"), + BbPromise = require("bluebird"); class AzuriteTable { - constructor() { - this.server; - // Support for PM2 Graceful Shutdown on Windows and Linux/OSX - // See http://pm2.keymetrics.io/docs/usage/signals-clean-restart/ - if (process.platform === 'win32') { - process.on('message', function (msg) { - if (msg == 'shutdown') { - this.close(); - } - }); - } - else { - process.on('SIGINT', function () { - this.close(); - }); + constructor() { + this.server; + // Support for PM2 Graceful Shutdown on Windows and Linux/OSX + // See http://pm2.keymetrics.io/docs/usage/signals-clean-restart/ + if (process.platform === "win32") { + process.on("message", function(msg) { + if (msg == "shutdown") { + this.close(); } + }); + } else { + process.on("SIGINT", function() { + this.close(); + }); } + } - init(options) { - return env.init(options) - .then(() => { - return tableStorageManager.init() - }) - .then(() => { - const app = express(); - if (!env.silent) { - app.use(morgan('dev')); - } - app.use(bodyParser.raw({ - inflate: true, - // According to https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-the-table-service-data-model - // maximum size of an entity is 1MB - limit: '10000kb', - type: function (type) { - return 
true; - } - })); - require('./routes/table/TableRoute')(app); - require('./routes/table/EntityRoute')(app); - app.use(require('./middleware/table/validation')); - app.use(require('./middleware/table/actions')); - this.server = app.listen(env.tableStoragePort, () => { - if (!env.silent) { - cli.tableStorageStatus(); - } - }); - }); - } - - close() { - return BbPromise.try(() => { - this.server.close(); + init(options) { + return env + .init(options) + .then(() => { + return tableStorageManager.init(); + }) + .then(() => { + const app = express(); + if (!env.silent) { + app.use(morgan("dev")); + } + app.use( + bodyParser.raw({ + inflate: true, + // According to https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-the-table-service-data-model + // maximum size of an entity is 1MB + limit: "10000kb", + type: function(type) { + return true; + }, + }) + ); + require("./routes/table/TableRoute")(app); + require("./routes/table/EntityRoute")(app); + app.use(require("./middleware/table/validation")); + app.use(require("./middleware/table/actions")); + this.server = app.listen(env.tableStoragePort, () => { + if (!env.silent) { + cli.tableStorageStatus(); + } }); - } + }); + } + + close() { + return BbPromise.try(() => { + this.server.close(); + tableStorageManager.flush(); + return tableStorageManager.close(); + }); + } } -module.exports = AzuriteTable; \ No newline at end of file +module.exports = AzuriteTable; diff --git a/lib/actions/blob/AbortCopyBlob.js b/lib/actions/blob/AbortCopyBlob.js index 59169537e..82b035b14 100644 --- a/lib/actions/blob/AbortCopyBlob.js +++ b/lib/actions/blob/AbortCopyBlob.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"); class AbortCopyBlob { - constructor() { - } + constructor() 
{} - process(azuriteRequest, res) { - storageManager.copyBlob(azuriteRequest) - .then((response) => { - res.status(204).send(); - }); - } + process(azuriteRequest, res) { + storageManager.copyBlob(azuriteRequest).then((response) => { + res.status(204).send(); + }); + } } -module.exports = new AbortCopyBlob(); \ No newline at end of file +module.exports = new AbortCopyBlob(); diff --git a/lib/actions/blob/CopyBlob.js b/lib/actions/blob/CopyBlob.js index d7b704d13..4bb1567e8 100644 --- a/lib/actions/blob/CopyBlob.js +++ b/lib/actions/blob/CopyBlob.js @@ -1,24 +1,27 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; - // TODO: - IsPending Validation module (uses CopyOpsManager) - // - Copy committed block blocks - // - Copy needs to be aborted if ETag changes while copy is pending +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"); + +// TODO: - IsPending Validation module (uses CopyOpsManager) +// - Copy committed block blocks +// - Copy needs to be aborted if ETag changes while copy is pending class CopyBlob { - constructor() { - } + constructor() {} - process(azuriteRequest, res) { - storageManager.copyBlob(azuriteRequest) - .then((response) => { - response.addHttpProperty(N.COPY_STATUS, response.proxy.original.copyStatus); - response.addHttpProperty(N.COPY_ID, response.proxy.original.copyId); - res.set(response.httpProps); - res.status(202).send(); - }); - } + process(azuriteRequest, res) { + storageManager.copyBlob(azuriteRequest).then((response) => { + response.addHttpProperty( + N.COPY_STATUS, + response.proxy.original.copyStatus + ); + response.addHttpProperty(N.COPY_ID, response.proxy.original.copyId); + res.set(response.httpProps); + res.status(202).send(); + }); + } } -module.exports = new CopyBlob(); \ No newline at end of file +module.exports = new CopyBlob(); diff --git 
a/lib/actions/blob/CreateContainer.js b/lib/actions/blob/CreateContainer.js index 399410397..c5ae7056c 100644 --- a/lib/actions/blob/CreateContainer.js +++ b/lib/actions/blob/CreateContainer.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class CreateContainer { - constructor() { - } + constructor() {} - process(azuriteRequest, res) { - storageManager.createContainer(azuriteRequest) - .then((response) => { - res.set(response.httpProps); - res.status(201).send(); - }); - } + process(azuriteRequest, res) { + storageManager.createContainer(azuriteRequest).then((response) => { + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new CreateContainer(); \ No newline at end of file +module.exports = new CreateContainer(); diff --git a/lib/actions/blob/DeleteBlob.js b/lib/actions/blob/DeleteBlob.js index d6ebf1cdd..62cbf56cd 100644 --- a/lib/actions/blob/DeleteBlob.js +++ b/lib/actions/blob/DeleteBlob.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class DeleteBlob { - constructor() { - } + constructor() {} - process(azuriteRequest, res) { - storageManager.deleteBlob(azuriteRequest) - .then((response) => { - res.set(response.httpProps); - res.status(202).send(); - }); - } + process(azuriteRequest, res) { + storageManager.deleteBlob(azuriteRequest).then((response) => { + res.set(response.httpProps); + res.status(202).send(); + }); + } } -module.exports = new DeleteBlob(); \ No newline at end of file +module.exports = new DeleteBlob(); diff --git a/lib/actions/blob/DeleteContainer.js b/lib/actions/blob/DeleteContainer.js index e0b40ffe7..b5f19f96a 100644 --- a/lib/actions/blob/DeleteContainer.js +++ 
b/lib/actions/blob/DeleteContainer.js @@ -1,23 +1,26 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class DeleteContainer { - constructor() { - } + constructor() {} - process(azuriteRequest, res) { - storageManager.deleteContainer(azuriteRequest) - .then((response) => { - res.set(response.httpProps); - res.status(202).send(); - // Fixme: For some unknown reason the outer catch in middleware/actions.js is never hit. - // Thus catching it here to make sure that a 500 is returned in an error case. - }).catch((e) => { - res.status(500).send(e.message); - throw e; - }) - } + process(azuriteRequest, res) { + storageManager + .deleteContainer(azuriteRequest) + .then((response) => { + res.set(response.httpProps); + res.status(202).send(); + // Fixme: For some unknown reason the outer catch in middleware/actions.js is never hit. + // Thus catching it here to make sure that a 500 is returned in an error case. 
+ }) + .catch((e) => { + res.status(500).send(e.message); + throw e; + }); + } } -module.exports = new DeleteContainer(); \ No newline at end of file +module.exports = new DeleteContainer(); diff --git a/lib/actions/blob/GetBlob.js b/lib/actions/blob/GetBlob.js index 29acccb02..d0b3690cd 100644 --- a/lib/actions/blob/GetBlob.js +++ b/lib/actions/blob/GetBlob.js @@ -1,88 +1,109 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - EntityType = require('./../../core/Constants').StorageEntityType, - env = require('./../../core/env'), - req = require('request'), - fs = require("fs-extra"), - crypto = require('crypto'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + EntityType = require("./../../core/Constants").StorageEntityType, + env = require("./../../core/env"), + req = require("request"), + fs = require("fs-extra"), + crypto = require("crypto"); class GetBlob { - constructor() { - } + constructor() {} - process(request, res) { - const range = request.httpProps[N.RANGE]; - storageManager.getBlob(request) - .then((response) => { - response.addHttpProperty(N.ACCEPT_RANGES, 'bytes'); - response.addHttpProperty(N.BLOB_TYPE, response.proxy.original.entityType); - response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); - response.addHttpProperty(N.CONTENT_TYPE, response.proxy.original.contentType); - // response.addHttpProperty(N.CONTENT_MD5, response.proxy.original.md5); - response.addHttpProperty(N.CONTENT_LANGUAGE, response.proxy.original.contentLanguage); - response.addHttpProperty(N.CONTENT_ENCODING, response.proxy.original.contentEncoding); - response.addHttpProperty(N.CONTENT_DISPOSITION, response.proxy.original.contentDisposition); - response.addHttpProperty(N.CACHE_CONTROL, response.proxy.original.cacheControl); - if (request.auth) response.sasOverrideHeaders(request.query); 
+ process(request, res) { + const range = request.httpProps[N.RANGE]; + storageManager.getBlob(request).then((response) => { + response.addHttpProperty(N.ACCEPT_RANGES, "bytes"); + response.addHttpProperty(N.BLOB_TYPE, response.proxy.original.entityType); + response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); + response.addHttpProperty( + N.CONTENT_TYPE, + response.proxy.original.contentType + ); + // response.addHttpProperty(N.CONTENT_MD5, response.proxy.original.md5); + response.addHttpProperty( + N.CONTENT_LANGUAGE, + response.proxy.original.contentLanguage + ); + response.addHttpProperty( + N.CONTENT_ENCODING, + response.proxy.original.contentEncoding + ); + response.addHttpProperty( + N.CONTENT_DISPOSITION, + response.proxy.original.contentDisposition + ); + response.addHttpProperty( + N.CACHE_CONTROL, + response.proxy.original.cacheControl + ); + if (request.auth) response.sasOverrideHeaders(request.query); - // If x-ms-range-get-content-md5 is specified together with the range attribute we load the entire data range into memory - // in order to compute the MD5 hash of this chunk. We cannot use piping in this case since we cannot modify the HTTP headers - // anymore once the response stream has started to get delivered. - // Otherwise we just pipe the result through to the client which is more performant. - if (range && request.httpProps[N.RANGE_GET_CONTENT_MD5]) { - const pair = range.split('=')[1].split('-'), - startByte = parseInt(pair[0]), - endByte = parseInt(pair[1]); + // If x-ms-range-get-content-md5 is specified together with the range attribute we load the entire data range into memory + // in order to compute the MD5 hash of this chunk. We cannot use piping in this case since we cannot modify the HTTP headers + // anymore once the response stream has started to get delivered. + // Otherwise we just pipe the result through to the client which is more performant. 
+ if (range && request.httpProps[N.RANGE_GET_CONTENT_MD5]) { + const pair = range.split("=")[1].split("-"), + startByte = parseInt(pair[0]), + endByte = parseInt(pair[1]); - const fullPath = env.diskStorageUri(request.id); - const readStream = fs.createReadStream(fullPath, { - flags: 'r', - start: startByte, - end: endByte, - encoding: 'utf8' - }); - readStream.read(); - const data = []; - readStream.on('data', (chunk) => { - data.push(chunk); - }); - readStream.on('end', () => { - const body = new Buffer(data, 'utf8'); - const hash = crypto.createHash('md5') - .update(body) - .digest('base64'); - response.addHttpProperty(N.CONTENT_MD5, hash); - res.set(response.httpProps); - res.status(206).send(body); - }); - } else { - req(this._createRequestHeader(env.webStorageUri(request.id), range)) - .on('response', (staticResponse) => { - response.addHttpProperty(N.CONTENT_LENGTH, staticResponse.headers[N.CONTENT_LENGTH]); - if (range) { - response.httpProps[N.BLOB_CONTENT_MD5] = response.httpProps[N.CONTENT_MD5]; - delete response.httpProps[N.CONTENT_MD5]; - response.httpProps[N.CONTENT_RANGE] = staticResponse.headers[N.CONTENT_RANGE]; - } - res.set(response.httpProps); - (range) ? 
res.writeHead(206) : res.writeHead(200); - }) - .pipe(res); - } - }); - } + const fullPath = env.diskStorageUri(request.id); + const readStream = fs.createReadStream(fullPath, { + flags: "r", + start: startByte, + end: endByte, + encoding: "utf8", + }); + readStream.read(); + const data = []; + readStream.on("data", (chunk) => { + data.push(chunk); + }); + readStream.on("end", () => { + const body = new Buffer(data, "utf8"); + const hash = crypto + .createHash("md5") + .update(body) + .digest("base64"); + response.addHttpProperty(N.CONTENT_MD5, hash); + res.set(response.httpProps); + res.status(206).send(body); + }); + } else { + req(this._createRequestHeader(env.webStorageUri(request.id), range)) + .on("response", (staticResponse) => { + response.addHttpProperty( + N.CONTENT_LENGTH, + staticResponse.headers[N.CONTENT_LENGTH] + ); + if (range) { + response.httpProps[N.BLOB_CONTENT_MD5] = + response.httpProps[N.CONTENT_MD5]; + delete response.httpProps[N.CONTENT_MD5]; + response.httpProps[N.CONTENT_RANGE] = + staticResponse.headers[N.CONTENT_RANGE]; + } + res.set(response.httpProps); + range ? 
res.writeHead(206) : res.writeHead(200); + }) + .pipe(res); + } + }); + } - _createRequestHeader(url, range) { - const request = {}; - request.headers = {}; - request.url = url; - if (range) { - request.headers.Range = range - } - return request; + _createRequestHeader(url, range) { + const request = {}; + request.headers = {}; + request.url = url; + if (range) { + request.headers.Range = range; } + return request; + } } -module.exports = new GetBlob(); \ No newline at end of file +module.exports = new GetBlob(); diff --git a/lib/actions/blob/GetBlobMetadata.js b/lib/actions/blob/GetBlobMetadata.js index b414869fe..d7c134e25 100644 --- a/lib/actions/blob/GetBlobMetadata.js +++ b/lib/actions/blob/GetBlobMetadata.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class GetBlobMetadata { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getBlobMetadata(request) - .then((response) => { - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.getBlobMetadata(request).then((response) => { + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new GetBlobMetadata(); \ No newline at end of file +module.exports = new GetBlobMetadata(); diff --git a/lib/actions/blob/GetBlobProperties.js b/lib/actions/blob/GetBlobProperties.js index 7ebeaf8d1..1e7999d29 100644 --- a/lib/actions/blob/GetBlobProperties.js +++ b/lib/actions/blob/GetBlobProperties.js @@ -1,45 +1,99 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - LeaseStatus = require('./../../core/Constants').LeaseStatus; +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + 
LeaseStatus = require("./../../core/Constants").LeaseStatus; class GetBlobProperties { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getBlobProperties(request) - .then((response) => { - response.addHttpProperty(N.ACCEPT_RANGES, 'bytes'); - response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); - response.addHttpProperty(N.LEASE_STATUS, ([LeaseStatus.AVAILABLE, LeaseStatus.BROKEN, LeaseStatus.EXPIRED].includes(response.proxy.original.leaseState)) ? 'unlocked' : 'locked'); - response.addHttpProperty(N.LEASE_STATE, response.proxy.original.leaseState); - if (response.httpProps[N.LEASE_STATE] === LeaseStatus.LEASED) { - response.addHttpProperty(N.LEASE_DURATION, (response.proxy.original.leaseDuration === -1) ? 'infinite' : 'fixed'); - } - response.addHttpProperty(N.CONTENT_TYPE, response.proxy.original.contentType); - response.addHttpProperty(N.CONTENT_MD5, response.proxy.original.md5); - response.addHttpProperty(N.CONTENT_LANGUAGE, response.proxy.original.contentLanguage); - response.addHttpProperty(N.CONTENT_ENCODING, response.proxy.original.contentEncoding); - response.addHttpProperty(N.CONTENT_DISPOSITION, response.proxy.original.contentDisposition); - response.addHttpProperty(N.CACHE_CONTROL, response.proxy.original.cacheControl); - response.addHttpProperty(N.BLOB_TYPE, response.proxy.original.entityType); - response.addHttpProperty(N.CONTENT_LENGTH, response.proxy.original.size); - response.addHttpProperty(N.COPY_ID, response.proxy.original.copyId); - response.addHttpProperty(N.COPY_STATUS, response.proxy.original.copyStatus); - response.addHttpProperty(N.COPY_COMPLETION_TIME, response.proxy.original.copyCompletionTime); - response.addHttpProperty(N.COPY_STATUS_DESCRIPTION, response.proxy.original.copyStatusDescription); - response.addHttpProperty(N.COPY_PROGRESS, response.proxy.original.copyProgress); - response.addHttpProperty(N.COPY_SOURCE, response.proxy.original.copySource); - 
response.addHttpProperty(N.INCREMENTAL_COPY, response.proxy.original.incrementalCopy); - response.addHttpProperty(N.SEQUENCE_NUMBER, response.proxy.original.sequenceNumber); - response.addHttpProperty(N.BLOB_COMMITTED_BLOCK_COUNT, response.proxy.original[N.BLOB_COMMITTED_BLOCK_COUNT]); - if (request.auth) response.sasOverrideHeaders(request.query); - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.getBlobProperties(request).then((response) => { + response.addHttpProperty(N.ACCEPT_RANGES, "bytes"); + response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); + response.addHttpProperty( + N.LEASE_STATUS, + [ + LeaseStatus.AVAILABLE, + LeaseStatus.BROKEN, + LeaseStatus.EXPIRED, + ].includes(response.proxy.original.leaseState) + ? "unlocked" + : "locked" + ); + response.addHttpProperty( + N.LEASE_STATE, + response.proxy.original.leaseState + ); + if (response.httpProps[N.LEASE_STATE] === LeaseStatus.LEASED) { + response.addHttpProperty( + N.LEASE_DURATION, + response.proxy.original.leaseDuration === -1 ? 
"infinite" : "fixed" + ); + } + response.addHttpProperty( + N.CONTENT_TYPE, + response.proxy.original.contentType + ); + response.addHttpProperty(N.CONTENT_MD5, response.proxy.original.md5); + response.addHttpProperty( + N.CONTENT_LANGUAGE, + response.proxy.original.contentLanguage + ); + response.addHttpProperty( + N.CONTENT_ENCODING, + response.proxy.original.contentEncoding + ); + response.addHttpProperty( + N.CONTENT_DISPOSITION, + response.proxy.original.contentDisposition + ); + response.addHttpProperty( + N.CACHE_CONTROL, + response.proxy.original.cacheControl + ); + response.addHttpProperty(N.BLOB_TYPE, response.proxy.original.entityType); + response.addHttpProperty(N.CONTENT_LENGTH, response.proxy.original.size); + response.addHttpProperty(N.COPY_ID, response.proxy.original.copyId); + response.addHttpProperty( + N.COPY_STATUS, + response.proxy.original.copyStatus + ); + response.addHttpProperty( + N.COPY_COMPLETION_TIME, + response.proxy.original.copyCompletionTime + ); + response.addHttpProperty( + N.COPY_STATUS_DESCRIPTION, + response.proxy.original.copyStatusDescription + ); + response.addHttpProperty( + N.COPY_PROGRESS, + response.proxy.original.copyProgress + ); + response.addHttpProperty( + N.COPY_SOURCE, + response.proxy.original.copySource + ); + response.addHttpProperty( + N.INCREMENTAL_COPY, + response.proxy.original.incrementalCopy + ); + response.addHttpProperty( + N.SEQUENCE_NUMBER, + response.proxy.original.sequenceNumber + ); + response.addHttpProperty( + N.BLOB_COMMITTED_BLOCK_COUNT, + response.proxy.original[N.BLOB_COMMITTED_BLOCK_COUNT] + ); + if (request.auth) response.sasOverrideHeaders(request.query); + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new GetBlobProperties(); \ No newline at end of file +module.exports = new GetBlobProperties(); diff --git a/lib/actions/blob/GetBlobServiceProperties.js b/lib/actions/blob/GetBlobServiceProperties.js index 78f81f8db..210a8752d 100644 --- 
a/lib/actions/blob/GetBlobServiceProperties.js +++ b/lib/actions/blob/GetBlobServiceProperties.js @@ -1,21 +1,23 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - js2xmlparser = require('js2xmlparser'); +"use strict"; +const storageManager = require("./../../core/blob/StorageManager"), + js2xmlparser = require("js2xmlparser"); class GetBlobServiceProperties { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getBlobServiceProperties(request) - .then((response) => { - const xml = js2xmlparser.parse('StorageServiceProperties', response.payload.StorageServiceProperties || []); - res.set(response.httpProps); - res.status(200).send(xml); - }); - } + process(request, res) { + storageManager.getBlobServiceProperties(request).then((response) => { + const xml = js2xmlparser.parse( + "StorageServiceProperties", + response.payload.StorageServiceProperties || [] + ); + res.set(response.httpProps); + res.status(200).send(xml); + }); + } } module.exports = new GetBlobServiceProperties(); diff --git a/lib/actions/blob/GetBlockList.js b/lib/actions/blob/GetBlockList.js index eabd8fddd..1a4830983 100644 --- a/lib/actions/blob/GetBlockList.js +++ b/lib/actions/blob/GetBlockList.js @@ -1,37 +1,51 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - BlockListType = require('./../../core/Constants').BlockListType, - js2xmlparser = require('js2xmlparser'), - Model = require('./../../xml/blob/BlockListXmlModel'); +"use strict"; +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + BlockListType = require("./../../core/Constants").BlockListType, + js2xmlparser = require("js2xmlparser"), + Model = require("./../../xml/blob/BlockListXmlModel"); class GetBlockList { - constructor() { - } + constructor() {} - process(request, res) { - 
storageManager.getBlockList(request) - .then((response) => { - const xml = this._transformToXml(response.payload, request.blockListType); - response.addHttpProperty(N.BLOB_CONTENT_LENGTH, response.proxy.original.size); - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - res.status(200).send(xml); - }); - } + process(request, res) { + storageManager.getBlockList(request).then((response) => { + const xml = this._transformToXml(response.payload, request.blockListType); + response.addHttpProperty( + N.BLOB_CONTENT_LENGTH, + response.proxy.original.size + ); + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.status(200).send(xml); + }); + } - _transformToXml(blockList, blockListType) { - let model = new Model.BlockList(blockListType); - for (let block of blockList) { - if (block.committed && (blockListType === BlockListType.COMMITTED || blockListType === BlockListType.ALL)) { - model.CommittedBlocks.Block.push(new Model.Block(block.blockId, block.size)); - } else if (!block.committed && (blockListType === BlockListType.UNCOMMITTED || blockListType === BlockListType.ALL)) { - model.UncommittedBlocks.Block.push(new Model.Block(block.blockId, block.size)); - } - } - return js2xmlparser.parse('BlockList', model); + _transformToXml(blockList, blockListType) { + let model = new Model.BlockList(blockListType); + for (let block of blockList) { + if ( + block.committed && + (blockListType === BlockListType.COMMITTED || + blockListType === BlockListType.ALL) + ) { + model.CommittedBlocks.Block.push( + new Model.Block(block.blockId, block.size) + ); + } else if ( + !block.committed && + (blockListType === BlockListType.UNCOMMITTED || + blockListType === BlockListType.ALL) + ) { + model.UncommittedBlocks.Block.push( + new Model.Block(block.blockId, block.size) + ); + } } + return js2xmlparser.parse("BlockList", model); + } } -module.exports = new GetBlockList(); \ No newline at end of file +module.exports = new GetBlockList(); diff --git 
a/lib/actions/blob/GetContainerAcl.js b/lib/actions/blob/GetContainerAcl.js index 82150b314..5fe2bb796 100644 --- a/lib/actions/blob/GetContainerAcl.js +++ b/lib/actions/blob/GetContainerAcl.js @@ -1,26 +1,35 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - js2xmlparser = require("js2xmlparser"); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + js2xmlparser = require("js2xmlparser"); class GetContainerAcl { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getContainerAcl(request) - .then((response) => { - if (response.proxy.original.access !== 'private') { - response.addHttpProperty(N.BLOB_PUBLIC_ACCESS, response.proxy.original.access); - } - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - res.set(response.httpProps); - let xml = js2xmlparser.parse('SignedIdentifiers', response.proxy.original.signedIdentifiers || {}); - xml = xml.replace(``, ``); - res.status(200).send(xml); - }); - } + process(request, res) { + storageManager.getContainerAcl(request).then((response) => { + if (response.proxy.original.access !== "private") { + response.addHttpProperty( + N.BLOB_PUBLIC_ACCESS, + response.proxy.original.access + ); + } + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.set(response.httpProps); + let xml = js2xmlparser.parse( + "SignedIdentifiers", + response.proxy.original.signedIdentifiers || {} + ); + xml = xml.replace( + ``, + `` + ); + res.status(200).send(xml); + }); + } } -module.exports = new GetContainerAcl(); \ No newline at end of file +module.exports = new GetContainerAcl(); diff --git a/lib/actions/blob/GetContainerMetadata.js b/lib/actions/blob/GetContainerMetadata.js index 5926a639b..2fa4d3bfd 100644 --- a/lib/actions/blob/GetContainerMetadata.js +++ b/lib/actions/blob/GetContainerMetadata.js @@ -1,18 
+1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class GetContainerMetadata { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getContainerMetadata(request) - .then((response) => { - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.getContainerMetadata(request).then((response) => { + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new GetContainerMetadata(); \ No newline at end of file +module.exports = new GetContainerMetadata(); diff --git a/lib/actions/blob/GetContainerProperties.js b/lib/actions/blob/GetContainerProperties.js index 2dfd4d047..c2c80b6d0 100644 --- a/lib/actions/blob/GetContainerProperties.js +++ b/lib/actions/blob/GetContainerProperties.js @@ -1,28 +1,46 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - LeaseStatus = require('./../../core/Constants').LeaseStatus, - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + LeaseStatus = require("./../../core/Constants").LeaseStatus, + N = require("./../../core/HttpHeaderNames"); class GetContainerProperties { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getContainerProperties(request) - .then((response) => { - response.addHttpProperty(N.LEASE_STATUS, ([LeaseStatus.AVAILABLE, LeaseStatus.BROKEN, LeaseStatus.EXPIRED].includes(response.proxy.original.leaseState)) ? 'unlocked' : 'locked'); - response.addHttpProperty(N.LEASE_STATE, response.proxy.original.leaseState); - if (response.httpProps[N.LEASE_STATE] === LeaseStatus.LEASED) { - response.addHttpProperty(N.LEASE_DURATION, (response.proxy.original.leaseDuration === -1) ? 
'infinite' : 'fixed'); - } - if (response.proxy.original.access !== 'private') { - response.addHttpProperty(N.BLOB_PUBLIC_ACCESS, response.proxy.original.access); - } - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.getContainerProperties(request).then((response) => { + response.addHttpProperty( + N.LEASE_STATUS, + [ + LeaseStatus.AVAILABLE, + LeaseStatus.BROKEN, + LeaseStatus.EXPIRED, + ].includes(response.proxy.original.leaseState) + ? "unlocked" + : "locked" + ); + response.addHttpProperty( + N.LEASE_STATE, + response.proxy.original.leaseState + ); + if (response.httpProps[N.LEASE_STATE] === LeaseStatus.LEASED) { + response.addHttpProperty( + N.LEASE_DURATION, + response.proxy.original.leaseDuration === -1 ? "infinite" : "fixed" + ); + } + if (response.proxy.original.access !== "private") { + response.addHttpProperty( + N.BLOB_PUBLIC_ACCESS, + response.proxy.original.access + ); + } + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new GetContainerProperties(); \ No newline at end of file +module.exports = new GetContainerProperties(); diff --git a/lib/actions/blob/GetPageRanges.js b/lib/actions/blob/GetPageRanges.js index 81a6f2244..8293b6243 100644 --- a/lib/actions/blob/GetPageRanges.js +++ b/lib/actions/blob/GetPageRanges.js @@ -1,29 +1,32 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - PageListXmlModel = require('./../../xml/blob/PageListXmlModel'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + PageListXmlModel = require("./../../xml/blob/PageListXmlModel"); class GetPageRanges { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.getPageRanges(request) - .then((response) => { - const model = this._createModel(response.payload); - 
response.addHttpProperty(N.BLOB_CONTENT_LENGTH, response.proxy.original.size); - res.status(200).send(model.toString()); - }); - } + process(request, res) { + storageManager.getPageRanges(request).then((response) => { + const model = this._createModel(response.payload); + response.addHttpProperty( + N.BLOB_CONTENT_LENGTH, + response.proxy.original.size + ); + res.status(200).send(model.toString()); + }); + } - _createModel(pageRanges) { - const model = new PageListXmlModel(); - for (const pr of pageRanges) { - model.addPageRange(pr.start * 512, pr.end * 512 - 1); - } - return model; + _createModel(pageRanges) { + const model = new PageListXmlModel(); + for (const pr of pageRanges) { + model.addPageRange(pr.start * 512, pr.end * 512 - 1); } + return model; + } } -module.exports = new GetPageRanges(); \ No newline at end of file +module.exports = new GetPageRanges(); diff --git a/lib/actions/blob/LeaseBlob.js b/lib/actions/blob/LeaseBlob.js index 5a46f0ea5..b8fbc7209 100644 --- a/lib/actions/blob/LeaseBlob.js +++ b/lib/actions/blob/LeaseBlob.js @@ -1,35 +1,40 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - LeaseActions = require('./../../core/Constants').LeaseActions; +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + LeaseActions = require("./../../core/Constants").LeaseActions; class LeaseBlob { - constructor() { - } - process(request, res) { - storageManager.leaseBlob(request) - .then((response) => { - response.addHttpProperty(N.LEASE_ID, response.proxy.original.leaseId); - let statusCode; - switch (request.httpProps[N.LEASE_ACTION]) { - case LeaseActions.ACQUIRE: - statusCode = 201; - break; - case LeaseActions.RENEW: - case LeaseActions.CHANGE: - case LeaseActions.RELEASE: - statusCode = 200; - break; - case LeaseActions.BREAK: - const leaseTimeRemaining = 
Math.floor((response.proxy.original.leaseBrokenAt - request.now) / 1000); - response.addHttpProperty(N.LEASE_TIME, (leaseTimeRemaining > 0) ? leaseTimeRemaining : 0); - statusCode = 202; - } - res.set(response.httpProps); - res.status(statusCode).send(); - }); - } + constructor() {} + process(request, res) { + storageManager.leaseBlob(request).then((response) => { + response.addHttpProperty(N.LEASE_ID, response.proxy.original.leaseId); + let statusCode; + switch (request.httpProps[N.LEASE_ACTION]) { + case LeaseActions.ACQUIRE: + statusCode = 201; + break; + case LeaseActions.RENEW: + case LeaseActions.CHANGE: + case LeaseActions.RELEASE: + statusCode = 200; + break; + case LeaseActions.BREAK: + const leaseTimeRemaining = Math.floor( + (response.proxy.original.leaseBrokenAt - request.now) / 1000 + ); + response.addHttpProperty( + N.LEASE_TIME, + leaseTimeRemaining > 0 ? leaseTimeRemaining : 0 + ); + statusCode = 202; + } + res.set(response.httpProps); + res.status(statusCode).send(); + }); + } } -module.exports = new LeaseBlob(); \ No newline at end of file +module.exports = new LeaseBlob(); diff --git a/lib/actions/blob/LeaseContainer.js b/lib/actions/blob/LeaseContainer.js index 3c3db9bca..2cdefbd69 100644 --- a/lib/actions/blob/LeaseContainer.js +++ b/lib/actions/blob/LeaseContainer.js @@ -1,36 +1,41 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - LeaseActions = require('./../../core/Constants').LeaseActions; +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + LeaseActions = require("./../../core/Constants").LeaseActions; class LeaseContainer { - constructor() { - } - process(request, res) { - storageManager.leaseContainer(request) - .then((response) => { - response.addHttpProperty(N.LEASE_ID, response.proxy.original.leaseId); - const leaseTimeRemaining = 
Math.floor((response.proxy.original.leaseBrokenAt - request.now) / 1000); - response.addHttpProperty(N.LEASE_TIME, (leaseTimeRemaining > 0) ? leaseTimeRemaining : 0); + constructor() {} + process(request, res) { + storageManager.leaseContainer(request).then((response) => { + response.addHttpProperty(N.LEASE_ID, response.proxy.original.leaseId); + const leaseTimeRemaining = Math.floor( + (response.proxy.original.leaseBrokenAt - request.now) / 1000 + ); + response.addHttpProperty( + N.LEASE_TIME, + leaseTimeRemaining > 0 ? leaseTimeRemaining : 0 + ); - let statusCode; - switch (request.httpProps[N.LEASE_ACTION]) { - case LeaseActions.ACQUIRE: - statusCode = 201; - break; - case LeaseActions.RENEW: - case LeaseActions.CHANGE: - case LeaseActions.RELEASE: - statusCode = 200; - break; - case LeaseActions.BREAK: - statusCode = 202; - } - res.set(response.httpProps); - res.status(statusCode).send(); - }); - } + let statusCode; + switch (request.httpProps[N.LEASE_ACTION]) { + case LeaseActions.ACQUIRE: + statusCode = 201; + break; + case LeaseActions.RENEW: + case LeaseActions.CHANGE: + case LeaseActions.RELEASE: + statusCode = 200; + break; + case LeaseActions.BREAK: + statusCode = 202; + } + res.set(response.httpProps); + res.status(statusCode).send(); + }); + } } -module.exports = new LeaseContainer(); \ No newline at end of file +module.exports = new LeaseContainer(); diff --git a/lib/actions/blob/ListBlobs.js b/lib/actions/blob/ListBlobs.js index 3af9e12c4..2acd9323d 100644 --- a/lib/actions/blob/ListBlobs.js +++ b/lib/actions/blob/ListBlobs.js @@ -1,134 +1,188 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - LeaseStatus = require('./../../core/Constants').LeaseStatus, - N = require('./../../core/HttpHeaderNames'), - js2xmlparser = require("js2xmlparser"), - utils = require('./../../core/utils'), - model = require('./../../xml/blob/BlobListXmlModel'); +"use strict"; + +const storageManager = 
require("./../../core/blob/StorageManager"), + LeaseStatus = require("./../../core/Constants").LeaseStatus, + N = require("./../../core/HttpHeaderNames"), + js2xmlparser = require("js2xmlparser"), + utils = require("./../../core/utils"), + model = require("./../../xml/blob/BlobListXmlModel"); class ListBlobs { - constructor() { - } + constructor() {} - process(request, res) { - const query = { - prefix: request.query.prefix || '', - delimiter: request.query.delimiter, - marker: parseInt(request.query.marker) || 0, - maxresults: parseInt(request.query.maxresults) || 5000, - include: request.query.include - } - storageManager.listBlobs(request, query) - .then((response) => { - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - const blobPrefixes = []; - const transformedModel = this._transformBlobList(response.payload, query, blobPrefixes, response.nextMarker); - let xmlDoc = js2xmlparser.parse('EnumerationResults', transformedModel); - xmlDoc = xmlDoc.replace(``, ``); - if (blobPrefixes.length > 0) { - xmlDoc = xmlDoc.replace(``, model.blobPrefixesToXml(blobPrefixes)); - } else { - xmlDoc = xmlDoc.replace(``, ''); - } - xmlDoc = xmlDoc.replace(/\>[\s]+\<'); - res.set(response.httpProps); - res.status(200).send(xmlDoc); - }); - } + process(request, res) { + const query = { + prefix: request.query.prefix || "", + delimiter: request.query.delimiter, + marker: parseInt(request.query.marker) || 0, + maxresults: parseInt(request.query.maxresults) || 5000, + include: request.query.include, + }; + storageManager.listBlobs(request, query).then((response) => { + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + const blobPrefixes = []; + const transformedModel = this._transformBlobList( + response.payload, + query, + blobPrefixes, + response.nextMarker + ); + let xmlDoc = js2xmlparser.parse("EnumerationResults", transformedModel); + xmlDoc = xmlDoc.replace( + ``, + `` + ); + if (blobPrefixes.length > 0) { + xmlDoc = xmlDoc.replace( + ``, + 
model.blobPrefixesToXml(blobPrefixes) + ); + } else { + xmlDoc = xmlDoc.replace(``, ""); + } + xmlDoc = xmlDoc.replace(/\>[\s]+\<"); + res.set(response.httpProps); + res.status(200).send(xmlDoc); + }); + } - _transformBlobList(blobList, query, blobPrefixes, nextMarker) { - let xmlBlobListModel = new model.BlobList(); - (query.prefix === undefined) ? delete xmlBlobListModel.Prefix : xmlBlobListModel.Prefix = query.prefix; - (query.maxresults === undefined) ? delete xmlBlobListModel.MaxResults : xmlBlobListModel.MaxResults = query.maxresults; - (query.delimiter === undefined) ? delete xmlBlobListModel.Delimiter : xmlBlobListModel.Delimiter = query.delimiter; - if (nextMarker > 0) { - xmlBlobListModel.NextMarker = nextMarker; - } - if (query.delimiter !== undefined) { - blobList = blobList.filter((blob) => { - const blobName = blob.original.name; - const restOfName = blobName.substr(query.prefix.length, blobName.length); - const keep = restOfName.indexOf(query.delimiter) === -1; - if (!keep) { - if (restOfName.indexOf(query.delimiter) === -1) { - // No add to BlobPrefix - } else { - const blobPrefix = `${query.prefix}${restOfName.split(query.delimiter)[0]}${query.delimiter}`; - if (!blobPrefixes.includes(blobPrefix)) { - blobPrefixes.push(blobPrefix); - } - } - } - - return keep; - }); - } - - if (query.marker) { - (blobList.length === 0) ? delete xmlBlobListModel.Marker : xmlBlobListModel.Marker = blobList[0].original.name; + _transformBlobList(blobList, query, blobPrefixes, nextMarker) { + let xmlBlobListModel = new model.BlobList(); + query.prefix === undefined + ? delete xmlBlobListModel.Prefix + : (xmlBlobListModel.Prefix = query.prefix); + query.maxresults === undefined + ? delete xmlBlobListModel.MaxResults + : (xmlBlobListModel.MaxResults = query.maxresults); + query.delimiter === undefined + ? 
delete xmlBlobListModel.Delimiter + : (xmlBlobListModel.Delimiter = query.delimiter); + if (nextMarker > 0) { + xmlBlobListModel.NextMarker = nextMarker; + } + if (query.delimiter !== undefined) { + blobList = blobList.filter((blob) => { + const blobName = blob.original.name; + const restOfName = blobName.substr( + query.prefix.length, + blobName.length + ); + const keep = restOfName.indexOf(query.delimiter) === -1; + if (!keep) { + if (restOfName.indexOf(query.delimiter) === -1) { + // No add to BlobPrefix + } else { + const blobPrefix = `${query.prefix}${ + restOfName.split(query.delimiter)[0] + }${query.delimiter}`; + if (!blobPrefixes.includes(blobPrefix)) { + blobPrefixes.push(blobPrefix); + } + } } - for (const blob of blobList) { - let modelBlob = new model.Blob(blob.original.name, blob.original.blobType); - xmlBlobListModel.Blobs.Blob.push(modelBlob); - if (query.include !== 'metadata') { - delete modelBlob.Metadata; - } else { - this._addMetadata(modelBlob, blob.original.metaProps); - } - if (blob.original.snapshot) { - modelBlob.Snapshot = blob.original.snapshotDate; - } else { - delete modelBlob.Snapshot; - } - if (blob.original.copyId) { - modelBlob.Properties.CopyId = blob.original.copyId; - modelBlob.Properties.CopyStatus = blob.original.copyStatus; - modelBlob.Properties.CopySource = blob.original.copySource; - modelBlob.Properties.CopyProgress = blob.original.copyProgress; - modelBlob.Properties.CopyCompletionTime = blob.original.copyCompletionTime; - blob.original.copyStatusDescription - ? 
modelBlob.Properties.CopyStatusDescription = blob.original.copyStatusDescription - : delete modelBlob.Properties.CopyStatusDescription; - } else { - delete modelBlob.Properties.CopyId; - delete modelBlob.Properties.CopyStatus; - delete modelBlob.Properties.CopySource; - delete modelBlob.Properties.CopyProgress; - delete modelBlob.Properties.CopyCompletionTime; - delete modelBlob.Properties.CopyStatusDescription; - } + return keep; + }); + } - modelBlob.Properties['Last-Modified'] = blob.lastModified(); - modelBlob.Properties['Etag'] = blob.original.etag - modelBlob.Properties['Content-Type'] = blob.original.contentType ? blob.original.contentType : {}; - modelBlob.Properties['Content-Encoding'] = blob.original.contentEncoding ? blob.original.contentEncoding : {}; - modelBlob.Properties['Content-MD5'] = blob.original.md5 ? blob.original.md5 : {}; - modelBlob.Properties['Content-Length'] = blob.original.size; - modelBlob.Properties['Cache-Control'] = blob.original.cacheControl ? blob.original.cacheControl : {}; - modelBlob.Properties['Content-Language'] = blob.original.contentLanguage ? blob.original.contentLanguage : {}; - modelBlob.Properties['Content-Disposition'] = blob.original.contentDisposition ? blob.original.contentDisposition : {}; - modelBlob.Properties.BlobType = blob.original.entityType; - modelBlob.Properties['x-ms-blob-sequence-number'] = blob.original.sequenceNumber ? blob.original.sequenceNumber : {}; - modelBlob.Properties.LeaseStatus = ([LeaseStatus.AVAILABLE, LeaseStatus.BROKEN, LeaseStatus.EXPIRED].includes(blob.original.leaseState)) ? 'unlocked' : 'locked'; - modelBlob.Properties.LeaseState = blob.original.leaseState; - if (blob.original.leaseState === LeaseStatus.LEASED) { - modelBlob.Properties.LeaseDuration = (blob.original.leaseDuration === -1) ? 'infinite' : 'fixed'; - } else { - delete modelBlob.Properties.LeaseDuration; - } - } - return xmlBlobListModel; + if (query.marker) { + blobList.length === 0 + ? 
delete xmlBlobListModel.Marker + : (xmlBlobListModel.Marker = blobList[0].original.name); } - _addMetadata(blobModel, metaProps) { - Object.keys(metaProps).forEach((key) => { - let value = metaProps[key]; - key = key.replace('x-ms-meta-', ''); - blobModel.Metadata[key] = value; - }); + for (const blob of blobList) { + let modelBlob = new model.Blob( + blob.original.name, + blob.original.blobType + ); + xmlBlobListModel.Blobs.Blob.push(modelBlob); + if (query.include !== "metadata") { + delete modelBlob.Metadata; + } else { + this._addMetadata(modelBlob, blob.original.metaProps); + } + if (blob.original.snapshot) { + modelBlob.Snapshot = blob.original.snapshotDate; + } else { + delete modelBlob.Snapshot; + } + if (blob.original.copyId) { + modelBlob.Properties.CopyId = blob.original.copyId; + modelBlob.Properties.CopyStatus = blob.original.copyStatus; + modelBlob.Properties.CopySource = blob.original.copySource; + modelBlob.Properties.CopyProgress = blob.original.copyProgress; + modelBlob.Properties.CopyCompletionTime = + blob.original.copyCompletionTime; + blob.original.copyStatusDescription + ? (modelBlob.Properties.CopyStatusDescription = + blob.original.copyStatusDescription) + : delete modelBlob.Properties.CopyStatusDescription; + } else { + delete modelBlob.Properties.CopyId; + delete modelBlob.Properties.CopyStatus; + delete modelBlob.Properties.CopySource; + delete modelBlob.Properties.CopyProgress; + delete modelBlob.Properties.CopyCompletionTime; + delete modelBlob.Properties.CopyStatusDescription; + } + + modelBlob.Properties["Last-Modified"] = blob.lastModified(); + modelBlob.Properties["Etag"] = blob.original.etag; + modelBlob.Properties["Content-Type"] = blob.original.contentType + ? blob.original.contentType + : {}; + modelBlob.Properties["Content-Encoding"] = blob.original.contentEncoding + ? blob.original.contentEncoding + : {}; + modelBlob.Properties["Content-MD5"] = blob.original.md5 + ? 
blob.original.md5 + : {}; + modelBlob.Properties["Content-Length"] = blob.original.size; + modelBlob.Properties["Cache-Control"] = blob.original.cacheControl + ? blob.original.cacheControl + : {}; + modelBlob.Properties["Content-Language"] = blob.original.contentLanguage + ? blob.original.contentLanguage + : {}; + modelBlob.Properties["Content-Disposition"] = blob.original + .contentDisposition + ? blob.original.contentDisposition + : {}; + modelBlob.Properties.BlobType = blob.original.entityType; + modelBlob.Properties["x-ms-blob-sequence-number"] = blob.original + .sequenceNumber + ? blob.original.sequenceNumber + : {}; + modelBlob.Properties.LeaseStatus = [ + LeaseStatus.AVAILABLE, + LeaseStatus.BROKEN, + LeaseStatus.EXPIRED, + ].includes(blob.original.leaseState) + ? "unlocked" + : "locked"; + modelBlob.Properties.LeaseState = blob.original.leaseState; + if (blob.original.leaseState === LeaseStatus.LEASED) { + modelBlob.Properties.LeaseDuration = + blob.original.leaseDuration === -1 ? 
"infinite" : "fixed"; + } else { + delete modelBlob.Properties.LeaseDuration; + } } + return xmlBlobListModel; + } + + _addMetadata(blobModel, metaProps) { + Object.keys(metaProps).forEach((key) => { + let value = metaProps[key]; + key = key.replace("x-ms-meta-", ""); + blobModel.Metadata[key] = value; + }); + } } -module.exports = new ListBlobs(); \ No newline at end of file +module.exports = new ListBlobs(); diff --git a/lib/actions/blob/ListContainers.js b/lib/actions/blob/ListContainers.js index e8a51bb5d..3040ce8d9 100644 --- a/lib/actions/blob/ListContainers.js +++ b/lib/actions/blob/ListContainers.js @@ -1,62 +1,97 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - js2xmlparser = require("js2xmlparser"), - model = require('./../../xml/blob/ContainerListXmlModel'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + js2xmlparser = require("js2xmlparser"), + model = require("./../../xml/blob/ContainerListXmlModel"); class ListContainers { - constructor() { - } + constructor() {} - process(request, res) { - const prefix = request.query.prefix || '', - maxresults = request.query.maxresults || "5000", - includeMetadata = (request.query.include === 'metadata') ? 
true : false, - marker = request.query.marker || ''; - storageManager.listContainer(request, prefix, maxresults) - .then((response) => { - response.addHttpProperty('content-type', 'application/xml'); - res.set(response.httpProps); - let transformedModel = this._transformContainerList(response.payload, includeMetadata, prefix, maxresults, marker); - let xmlDoc = js2xmlparser.parse('EnumerationResults', transformedModel); - xmlDoc = xmlDoc.replace(``, ``); - xmlDoc = xmlDoc.replace(``, ``); - xmlDoc = xmlDoc.replace(/\>[\s]+\<'); - // Forcing Express.js to not touch the charset of the buffer in order to remove charset=utf-8 as part of the content-type - res.status(200).send(new Buffer(xmlDoc)); - }); - } + process(request, res) { + const prefix = request.query.prefix || "", + maxresults = request.query.maxresults || "5000", + includeMetadata = request.query.include === "metadata" ? true : false, + marker = request.query.marker || ""; + storageManager + .listContainer(request, prefix, maxresults) + .then((response) => { + response.addHttpProperty("content-type", "application/xml"); + res.set(response.httpProps); + let transformedModel = this._transformContainerList( + response.payload, + includeMetadata, + prefix, + maxresults, + marker + ); + let xmlDoc = js2xmlparser.parse("EnumerationResults", transformedModel); + xmlDoc = xmlDoc.replace( + ``, + `` + ); + xmlDoc = xmlDoc.replace( + ``, + `` + ); + xmlDoc = xmlDoc.replace(/\>[\s]+\<"); + // Forcing Express.js to not touch the charset of the buffer in order to remove charset=utf-8 as part of the content-type + res.status(200).send(new Buffer(xmlDoc)); + }); + } - _transformContainerList(containers, includeMetadata, prefix, maxresults, marker) { - let xmlContainerListModel = new model.ContainerList(); - (prefix === '') ? delete xmlContainerListModel.Prefix : xmlContainerListModel.Prefix = prefix; - (maxresults === '') ? 
delete xmlContainerListModel.MaxResults : xmlContainerListModel.MaxResults = maxresults; - (marker === '') ? delete xmlContainerListModel.Marker : xmlContainerListModel.Marker = marker; - // Fixme: We do not support markers yet - delete xmlContainerListModel.NextMarker; - for (let container of containers) { - if (container.name === '$logs') { - continue; - } - let modelContainer = new model.Container(container.name); - xmlContainerListModel.Containers.Container.push(modelContainer); - if (!includeMetadata || Object.keys(container.metaProps).length === 0) { - delete modelContainer.Metadata; - } else { - modelContainer.Metadata = container.metaProps; - } - modelContainer.Properties['Last-Modified'] = new Date(container.meta.updated || container.meta.created).toUTCString(); - modelContainer.Properties.ETag = container.etag; - modelContainer.Properties.LeaseStatus = (['available', 'broken', 'expired'].includes(container.leaseState)) ? 'unlocked' : 'locked'; - modelContainer.Properties.LeaseState = container.leaseState; - if (container.leaseState === 'leased') { - modelContainer.Properties.LeaseDuration = (container.leaseDuration === -1) ? 'infinite' : 'fixed'; - } else { - delete modelContainer.Properties.LeaseDuration; - } - } - return xmlContainerListModel; + _transformContainerList( + containers, + includeMetadata, + prefix, + maxresults, + marker + ) { + let xmlContainerListModel = new model.ContainerList(); + prefix === "" + ? delete xmlContainerListModel.Prefix + : (xmlContainerListModel.Prefix = prefix); + maxresults === "" + ? delete xmlContainerListModel.MaxResults + : (xmlContainerListModel.MaxResults = maxresults); + marker === "" + ? 
delete xmlContainerListModel.Marker + : (xmlContainerListModel.Marker = marker); + // Fixme: We do not support markers yet + delete xmlContainerListModel.NextMarker; + for (let container of containers) { + if (container.name === "$logs") { + continue; + } + let modelContainer = new model.Container(container.name); + xmlContainerListModel.Containers.Container.push(modelContainer); + if (!includeMetadata || Object.keys(container.metaProps).length === 0) { + delete modelContainer.Metadata; + } else { + modelContainer.Metadata = container.metaProps; + } + modelContainer.Properties["Last-Modified"] = new Date( + container.meta.updated || container.meta.created + ).toUTCString(); + modelContainer.Properties.ETag = container.etag; + modelContainer.Properties.LeaseStatus = [ + "available", + "broken", + "expired", + ].includes(container.leaseState) + ? "unlocked" + : "locked"; + modelContainer.Properties.LeaseState = container.leaseState; + if (container.leaseState === "leased") { + modelContainer.Properties.LeaseDuration = + container.leaseDuration === -1 ? 
"infinite" : "fixed"; + } else { + delete modelContainer.Properties.LeaseDuration; + } } + return xmlContainerListModel; + } } -module.exports = new ListContainers; \ No newline at end of file +module.exports = new ListContainers(); diff --git a/lib/actions/blob/PreflightBlobRequest.js b/lib/actions/blob/PreflightBlobRequest.js index 29b8b0d3b..924ae81ef 100644 --- a/lib/actions/blob/PreflightBlobRequest.js +++ b/lib/actions/blob/PreflightBlobRequest.js @@ -1,23 +1,36 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - AzuriteResponse = require('./../../model/blob/AzuriteResponse'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + AzuriteResponse = require("./../../model/blob/AzuriteResponse"), + N = require("./../../core/HttpHeaderNames"); class PreflightBlobRequest { - constructor() { - } + constructor() {} - process(req, res) { - const response = new AzuriteResponse(); // Add Access-Control-Expose-Headers - response.addHttpProperty(N.ACCESS_CONTROL_ALLOW_ORIGIN, req.httpProps[N.ORIGIN]); // Refactor into response - response.addHttpProperty(N.ACCESS_CONTROL_ALLOW_METHODS, req.httpProps[N.ACCESS_CONTROL_REQUEST_METHOD]); - response.addHttpProperty(N.ACCESS_CONTROL_ALLOW_HEADERS, req.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS]); - response.addHttpProperty(N.ACCESS_CONTROL_MAX_AGE, req.cors.maxAgeInSeconds); - response.addHttpProperty(N.ACCESS_CONTROL_ALLOW_CREDENTIALS, true); // Refactor into response - res.set(response.httpProps); - res.status(200).send(); - } + process(req, res) { + const response = new AzuriteResponse(); // Add Access-Control-Expose-Headers + response.addHttpProperty( + N.ACCESS_CONTROL_ALLOW_ORIGIN, + req.httpProps[N.ORIGIN] + ); // Refactor into response + response.addHttpProperty( + N.ACCESS_CONTROL_ALLOW_METHODS, + req.httpProps[N.ACCESS_CONTROL_REQUEST_METHOD] + ); + response.addHttpProperty( + 
N.ACCESS_CONTROL_ALLOW_HEADERS, + req.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS] + ); + response.addHttpProperty( + N.ACCESS_CONTROL_MAX_AGE, + req.cors.maxAgeInSeconds + ); + response.addHttpProperty(N.ACCESS_CONTROL_ALLOW_CREDENTIALS, true); // Refactor into response + res.set(response.httpProps); + res.status(200).send(); + } } -module.exports = new PreflightBlobRequest(); \ No newline at end of file +module.exports = new PreflightBlobRequest(); diff --git a/lib/actions/blob/PutAppendBlock.js b/lib/actions/blob/PutAppendBlock.js index f90a8a4fe..d591309c5 100644 --- a/lib/actions/blob/PutAppendBlock.js +++ b/lib/actions/blob/PutAppendBlock.js @@ -1,23 +1,32 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"); class PutAppendBlock { - constructor() { - } + constructor() {} - process(azuriteRequest, res) { - storageManager.putAppendBlock(azuriteRequest) - .then((response) => { - response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); - response.addHttpProperty(N.CONTENT_MD5, azuriteRequest.calculateContentMd5()); - response.addHttpProperty(N.BLOB_COMMITTED_BLOCK_COUNT, response.proxy.original[N.BLOB_COMMITTED_BLOCK_COUNT]); - response.addHttpProperty(N.BLOB_APPEND_OFFSET, response.proxy.original.size - azuriteRequest.body.length); - res.set(response.httpProps); - res.status(201).send(); - }); - } + process(azuriteRequest, res) { + storageManager.putAppendBlock(azuriteRequest).then((response) => { + response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); + response.addHttpProperty( + N.CONTENT_MD5, + azuriteRequest.calculateContentMd5() + ); + response.addHttpProperty( + N.BLOB_COMMITTED_BLOCK_COUNT, + response.proxy.original[N.BLOB_COMMITTED_BLOCK_COUNT] + ); + response.addHttpProperty( + N.BLOB_APPEND_OFFSET, + 
response.proxy.original.size - azuriteRequest.body.length + ); + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new PutAppendBlock(); \ No newline at end of file +module.exports = new PutAppendBlock(); diff --git a/lib/actions/blob/PutBlob.js b/lib/actions/blob/PutBlob.js index 16d151cee..b73b7de2d 100644 --- a/lib/actions/blob/PutBlob.js +++ b/lib/actions/blob/PutBlob.js @@ -1,19 +1,19 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class PutBlob { - constructor() { - } + constructor() {} - process(azuriteRequest, res) { - storageManager.putBlob(azuriteRequest) - .then((response) => { - response.addHttpProperty('x-ms-request-server-encrypted', false); - res.set(response.httpProps); - res.status(201).send(); - }); - } + process(azuriteRequest, res) { + storageManager.putBlob(azuriteRequest).then((response) => { + response.addHttpProperty("x-ms-request-server-encrypted", false); + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new PutBlob(); \ No newline at end of file +module.exports = new PutBlob(); diff --git a/lib/actions/blob/PutBlock.js b/lib/actions/blob/PutBlock.js index 49981f364..2742f0e79 100644 --- a/lib/actions/blob/PutBlock.js +++ b/lib/actions/blob/PutBlock.js @@ -1,20 +1,20 @@ -'use strict'; -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'); +/** @format */ + +"use strict"; +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"); class PutBlock { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.putBlock(request) - .then((response) => { - response.addHttpProperty(N.CONTENT_MD5, request.calculateContentMd5()); - response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); - 
res.set(response.httpProps); - res.status(201).send(); - }); - } + process(request, res) { + storageManager.putBlock(request).then((response) => { + response.addHttpProperty(N.CONTENT_MD5, request.calculateContentMd5()); + response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new PutBlock(); \ No newline at end of file +module.exports = new PutBlock(); diff --git a/lib/actions/blob/PutBlockList.js b/lib/actions/blob/PutBlockList.js index 3046dd1ef..14e4f925c 100644 --- a/lib/actions/blob/PutBlockList.js +++ b/lib/actions/blob/PutBlockList.js @@ -1,22 +1,22 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'), - AError = require('./../../core/AzuriteError'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"), + AError = require("./../../core/AzuriteError"); class PutBlockList { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.putBlockList(request) - .then((response) => { - response.addHttpProperty(N.CONTENT_MD5, request.calculateContentMd5()); - response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); - res.set(response.httpProps); - res.status(201).send(); - }); - } + process(request, res) { + storageManager.putBlockList(request).then((response) => { + response.addHttpProperty(N.CONTENT_MD5, request.calculateContentMd5()); + response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new PutBlockList(); \ No newline at end of file +module.exports = new PutBlockList(); diff --git a/lib/actions/blob/PutPage.js b/lib/actions/blob/PutPage.js index c7dda9f78..2318dac13 100644 --- a/lib/actions/blob/PutPage.js +++ b/lib/actions/blob/PutPage.js @@ -1,20 +1,20 @@ -'use strict'; +/** 
@format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"); class PutPage { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.putPage(request) - .then((response) => { - response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); - res.set(response.httpProps); - res.status(201).send(); - }); - } + process(request, res) { + storageManager.putPage(request).then((response) => { + response.addHttpProperty(N.REQUEST_SERVER_ENCRYPTED, false); + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new PutPage(); \ No newline at end of file +module.exports = new PutPage(); diff --git a/lib/actions/blob/SetBlobMetadata.js b/lib/actions/blob/SetBlobMetadata.js index ab5966dc0..df885f7c0 100644 --- a/lib/actions/blob/SetBlobMetadata.js +++ b/lib/actions/blob/SetBlobMetadata.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class SetBlobMetadata { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.setBlobMetadata(request) - .then((response) => { - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.setBlobMetadata(request).then((response) => { + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new SetBlobMetadata(); \ No newline at end of file +module.exports = new SetBlobMetadata(); diff --git a/lib/actions/blob/SetBlobProperties.js b/lib/actions/blob/SetBlobProperties.js index 119b8c61f..0366eec41 100644 --- a/lib/actions/blob/SetBlobProperties.js +++ b/lib/actions/blob/SetBlobProperties.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format 
*/ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class SetBlobProperties { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.setBlobProperties(request) - .then((response) => { - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.setBlobProperties(request).then((response) => { + res.set(response.httpProps); + res.status(200).send(); + }); + } } module.exports = new SetBlobProperties(); diff --git a/lib/actions/blob/SetBlobServiceProperties.js b/lib/actions/blob/SetBlobServiceProperties.js index e355bb62c..a8a234918 100644 --- a/lib/actions/blob/SetBlobServiceProperties.js +++ b/lib/actions/blob/SetBlobServiceProperties.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class SetBlobServiceProperties { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.setBlobServiceProperties(request) - .then((response) => { - res.set(response.httpProps); - res.status(202).send(); - }); - } + process(request, res) { + storageManager.setBlobServiceProperties(request).then((response) => { + res.set(response.httpProps); + res.status(202).send(); + }); + } } module.exports = new SetBlobServiceProperties(); diff --git a/lib/actions/blob/SetContainerAcl.js b/lib/actions/blob/SetContainerAcl.js index f0e8160ce..dda5f4d6a 100644 --- a/lib/actions/blob/SetContainerAcl.js +++ b/lib/actions/blob/SetContainerAcl.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class SetContainerAcl { - constructor() { - } + constructor() {} - process(request, res) { - 
storageManager.setContainerAcl(request) - .then((response) => { - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.setContainerAcl(request).then((response) => { + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new SetContainerAcl(); \ No newline at end of file +module.exports = new SetContainerAcl(); diff --git a/lib/actions/blob/SetContainerMetadata.js b/lib/actions/blob/SetContainerMetadata.js index bd47e9b43..045c6d585 100644 --- a/lib/actions/blob/SetContainerMetadata.js +++ b/lib/actions/blob/SetContainerMetadata.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"); class SetContainerMetadata { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.setContainerMetadata(request) - .then((response) => { - res.set(response.httpProps); - res.status(200).send(); - }); - } + process(request, res) { + storageManager.setContainerMetadata(request).then((response) => { + res.set(response.httpProps); + res.status(200).send(); + }); + } } -module.exports = new SetContainerMetadata(); \ No newline at end of file +module.exports = new SetContainerMetadata(); diff --git a/lib/actions/blob/SnapshotBlob.js b/lib/actions/blob/SnapshotBlob.js index 355b3b51c..9415ae33a 100644 --- a/lib/actions/blob/SnapshotBlob.js +++ b/lib/actions/blob/SnapshotBlob.js @@ -1,20 +1,23 @@ -'use strict'; +/** @format */ -const storageManager = require('./../../core/blob/StorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const storageManager = require("./../../core/blob/StorageManager"), + N = require("./../../core/HttpHeaderNames"); class SnapshotBlob { - constructor() { - } + constructor() {} - process(request, res) { - storageManager.snapshotBlob(request) - .then((response) => { - 
response.addHttpProperty(N.SNAPSHOT_DATE, response.proxy.original.snapshotDate); - res.set(response.httpProps); - res.status(201).send(); - }); - } + process(request, res) { + storageManager.snapshotBlob(request).then((response) => { + response.addHttpProperty( + N.SNAPSHOT_DATE, + response.proxy.original.snapshotDate + ); + res.set(response.httpProps); + res.status(201).send(); + }); + } } -module.exports = new SnapshotBlob(); \ No newline at end of file +module.exports = new SnapshotBlob(); diff --git a/lib/actions/queue/ClearMessages.js b/lib/actions/queue/ClearMessages.js index 2063c6056..a82b1ce8c 100644 --- a/lib/actions/queue/ClearMessages.js +++ b/lib/actions/queue/ClearMessages.js @@ -1,19 +1,22 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class ClearMessages { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue; - queue.clear(); - const response = new AzuriteQueueResponse(); - res.set(response.httpProps); - res.status(204).send(); - } + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue; + queue.clear(); + const response = new AzuriteQueueResponse(); + res.set(response.httpProps); + res.status(204).send(); + } } -module.exports = new ClearMessages(); \ No newline at end of file +module.exports = new ClearMessages(); diff --git a/lib/actions/queue/CreateQueue.js b/lib/actions/queue/CreateQueue.js index f469cf358..8cb006d86 100644 --- a/lib/actions/queue/CreateQueue.js +++ b/lib/actions/queue/CreateQueue.js @@ -1,25 +1,28 @@ -'use strict'; +/** @format */ -const QueueManager = 
require('./../../core/queue/QueueManager'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; -class CreateQueue { - constructor() { - } +const QueueManager = require("./../../core/queue/QueueManager"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); - process(request, res) { - const { queue } = QueueManager.getQueueAndMessage({ queueName: request.queueName }); - const response = new AzuriteQueueResponse(); - res.set(response.httpProps); - if (queue !== undefined) { - // Queue already exists, and existing metadata is identical to the metadata specified on the Create Queue request - res.status(204).send(); - return; - } +class CreateQueue { + constructor() {} - QueueManager.add({ name: request.queueName, metaProps: request.metaProps }); - res.status(201).send(); + process(request, res) { + const { queue } = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }); + const response = new AzuriteQueueResponse(); + res.set(response.httpProps); + if (queue !== undefined) { + // Queue already exists, and existing metadata is identical to the metadata specified on the Create Queue request + res.status(204).send(); + return; } + + QueueManager.add({ name: request.queueName, metaProps: request.metaProps }); + res.status(201).send(); + } } -module.exports = new CreateQueue(); \ No newline at end of file +module.exports = new CreateQueue(); diff --git a/lib/actions/queue/DeleteMessage.js b/lib/actions/queue/DeleteMessage.js index 5334a3492..09be1532e 100644 --- a/lib/actions/queue/DeleteMessage.js +++ b/lib/actions/queue/DeleteMessage.js @@ -1,19 +1,22 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class 
DeleteMessage { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue; - queue.delete(request.messageId, request.popReceipt); - const response = new AzuriteQueueResponse(); - res.set(response.httpProps); - res.status(204).send(); - } + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue; + queue.delete(request.messageId, request.popReceipt); + const response = new AzuriteQueueResponse(); + res.set(response.httpProps); + res.status(204).send(); + } } -module.exports = new DeleteMessage(); \ No newline at end of file +module.exports = new DeleteMessage(); diff --git a/lib/actions/queue/DeleteQueue.js b/lib/actions/queue/DeleteQueue.js index 72ad66a99..04ddd6b31 100644 --- a/lib/actions/queue/DeleteQueue.js +++ b/lib/actions/queue/DeleteQueue.js @@ -1,18 +1,19 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class DeleteQueue { - constructor() { - } + constructor() {} - process(request, res) { - QueueManager.delete(request.queueName); - const response = new AzuriteQueueResponse(); - res.set(response.httpProps); - res.status(204).send(); - } + process(request, res) { + QueueManager.delete(request.queueName); + const response = new AzuriteQueueResponse(); + res.set(response.httpProps); + res.status(204).send(); + } } -module.exports = new DeleteQueue(); \ No newline at end of file +module.exports = new DeleteQueue(); diff --git a/lib/actions/queue/GetMessages.js b/lib/actions/queue/GetMessages.js index d5f1c4003..40fc217dd 100644 --- a/lib/actions/queue/GetMessages.js +++ b/lib/actions/queue/GetMessages.js @@ -1,36 +1,46 
@@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - QueueMessagesListXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageListXmlModel, - QueueMessageXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageXmlModel, - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + QueueMessagesListXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageListXmlModel, + QueueMessageXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageXmlModel, + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"), + N = require("./../../core/HttpHeaderNames"); class GetMessages { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue; - const messages = queue.gett({ numOfMessages: request.numOfMessages, visibilityTimeout: request.visibilityTimeout }); - const model = new QueueMessagesListXmlModel(); - for (const msg of messages) { - model.add(new QueueMessageXmlModel({ - messageId: msg.messageId, - expirationTime: new Date(msg.expirationTime * 1000).toUTCString(), - insertionTime: new Date(msg.insertionTime * 1000).toUTCString(), - popReceipt: msg.popReceipt, - timeNextVisible: new Date(msg.timeNextVisible * 1000).toUTCString(), - dequeueCount: msg.dequeueCount, - messageText: msg.msg - })); - } - const xmlBody = model.toXml(); - const response = new AzuriteQueueResponse(); - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - res.set(response.httpProps); - res.status(200).send(xmlBody); + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue; + const messages = queue.gett({ + numOfMessages: request.numOfMessages, + visibilityTimeout: 
request.visibilityTimeout, + }); + const model = new QueueMessagesListXmlModel(); + for (const msg of messages) { + model.add( + new QueueMessageXmlModel({ + messageId: msg.messageId, + expirationTime: new Date(msg.expirationTime * 1000).toUTCString(), + insertionTime: new Date(msg.insertionTime * 1000).toUTCString(), + popReceipt: msg.popReceipt, + timeNextVisible: new Date(msg.timeNextVisible * 1000).toUTCString(), + dequeueCount: msg.dequeueCount, + messageText: msg.msg, + }) + ); } + const xmlBody = model.toXml(); + const response = new AzuriteQueueResponse(); + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.set(response.httpProps); + res.status(200).send(xmlBody); + } } -module.exports = new GetMessages(); \ No newline at end of file +module.exports = new GetMessages(); diff --git a/lib/actions/queue/GetQueueAcl.js b/lib/actions/queue/GetQueueAcl.js index 966641c41..8ee9ae8a1 100644 --- a/lib/actions/queue/GetQueueAcl.js +++ b/lib/actions/queue/GetQueueAcl.js @@ -1,24 +1,30 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - js2xmlparser = require("js2xmlparser"), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + js2xmlparser = require("js2xmlparser"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"), + N = require("./../../core/HttpHeaderNames"); class GetQueueAcl { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue, - signedIdentifiers = queue.getAcl(); - let xml = js2xmlparser.parse('SignedIdentifiers', signedIdentifiers || {}); - xml = xml.replace(``, ``); - const response = new AzuriteQueueResponse(); - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - res.set(response.httpProps); - 
res.status(200).send(xml); - } + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue, + signedIdentifiers = queue.getAcl(); + let xml = js2xmlparser.parse("SignedIdentifiers", signedIdentifiers || {}); + xml = xml.replace( + ``, + `` + ); + const response = new AzuriteQueueResponse(); + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.set(response.httpProps); + res.status(200).send(xml); + } } -module.exports = new GetQueueAcl(); \ No newline at end of file +module.exports = new GetQueueAcl(); diff --git a/lib/actions/queue/GetQueueMetadata.js b/lib/actions/queue/GetQueueMetadata.js index e95f3784d..a6ff7ffe7 100644 --- a/lib/actions/queue/GetQueueMetadata.js +++ b/lib/actions/queue/GetQueueMetadata.js @@ -1,23 +1,26 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - N = require('./../../core/HttpHeaderNames'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + N = require("./../../core/HttpHeaderNames"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class GetQueueMetadata { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue, - metaProps = queue.metaProps, - queueLength = queue.getLength(), - response = new AzuriteQueueResponse(); - response.addMetaProps(metaProps); - response.addHttpProperty(N.APPROXIMATE_MESSAGES_COUNT, queueLength); - res.set(response.httpProps); - res.status(200).send(); - } + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue, + metaProps = queue.metaProps, + queueLength = queue.getLength(), + response = new AzuriteQueueResponse(); + response.addMetaProps(metaProps); + 
response.addHttpProperty(N.APPROXIMATE_MESSAGES_COUNT, queueLength); + res.set(response.httpProps); + res.status(200).send(); + } } module.exports = new GetQueueMetadata(); diff --git a/lib/actions/queue/ListQueues.js b/lib/actions/queue/ListQueues.js index 4a85f8d5b..4b3f0abd1 100644 --- a/lib/actions/queue/ListQueues.js +++ b/lib/actions/queue/ListQueues.js @@ -1,36 +1,41 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - QueueListXmlModel = require('./../../xml/queue/QueueList').QueueList, - QueueXmlModel = require('./../../xml/queue/QueueList').Queue, - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + QueueListXmlModel = require("./../../xml/queue/QueueList").QueueList, + QueueXmlModel = require("./../../xml/queue/QueueList").Queue, + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"), + N = require("./../../core/HttpHeaderNames"); class ListQueues { - constructor() { - } + constructor() {} - process(request, res) { - const query = request.query; - const { queues, nextMarker } = QueueManager.listQueues({ prefix: query.prefix, marker: query.marker, maxresults: query.maxresults }); + process(request, res) { + const query = request.query; + const { queues, nextMarker } = QueueManager.listQueues({ + prefix: query.prefix, + marker: query.marker, + maxresults: query.maxresults, + }); - const xmlModel = new QueueListXmlModel(); - for (const queue of queues) { - const xmlQueue = new QueueXmlModel(queue.name); - if (request.query.include === 'metadata') { - xmlQueue.addMetadata(queue.metaProps); - } - xmlModel.add(xmlQueue); - } - if (nextMarker !== undefined) { - xmlModel.NextMarker = nextMarker; - } - const xmlString = xmlModel.toXml(); - const response = new AzuriteQueueResponse(); - response.addHttpProperty(N.CONTENT_TYPE, 
'application/xml'); - res.set(response.httpProps); - res.status(200).send(xmlString); + const xmlModel = new QueueListXmlModel(); + for (const queue of queues) { + const xmlQueue = new QueueXmlModel(queue.name); + if (request.query.include === "metadata") { + xmlQueue.addMetadata(queue.metaProps); + } + xmlModel.add(xmlQueue); + } + if (nextMarker !== undefined) { + xmlModel.NextMarker = nextMarker; } + const xmlString = xmlModel.toXml(); + const response = new AzuriteQueueResponse(); + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.set(response.httpProps); + res.status(200).send(xmlString); + } } -module.exports = new ListQueues(); \ No newline at end of file +module.exports = new ListQueues(); diff --git a/lib/actions/queue/PeekMessages.js b/lib/actions/queue/PeekMessages.js index e7dc81fed..f6868e6ab 100644 --- a/lib/actions/queue/PeekMessages.js +++ b/lib/actions/queue/PeekMessages.js @@ -1,34 +1,41 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - QueueMessagesListXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageListXmlModel, - QueueMessageXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageXmlModel, - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + QueueMessagesListXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageListXmlModel, + QueueMessageXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageXmlModel, + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"), + N = require("./../../core/HttpHeaderNames"); class PeekMessages { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue; - const messages = queue.peek(request.numOfMessages); - 
const model = new QueueMessagesListXmlModel(); - for (const msg of messages) { - model.add(new QueueMessageXmlModel({ - messageId: msg.messageId, - expirationTime: new Date(msg.expirationTime * 1000).toUTCString(), - insertionTime: new Date(msg.insertionTime * 1000).toUTCString(), - dequeueCount: msg.dequeueCount, - messageText: msg.msg - })); - } - const xmlBody = model.toXml(); - const response = new AzuriteQueueResponse(); - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - res.set(response.httpProps); - res.status(200).send(xmlBody); + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue; + const messages = queue.peek(request.numOfMessages); + const model = new QueueMessagesListXmlModel(); + for (const msg of messages) { + model.add( + new QueueMessageXmlModel({ + messageId: msg.messageId, + expirationTime: new Date(msg.expirationTime * 1000).toUTCString(), + insertionTime: new Date(msg.insertionTime * 1000).toUTCString(), + dequeueCount: msg.dequeueCount, + messageText: msg.msg, + }) + ); } + const xmlBody = model.toXml(); + const response = new AzuriteQueueResponse(); + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.set(response.httpProps); + res.status(200).send(xmlBody); + } } -module.exports = new PeekMessages(); \ No newline at end of file +module.exports = new PeekMessages(); diff --git a/lib/actions/queue/PutMessage.js b/lib/actions/queue/PutMessage.js index 2b789797b..bb0eab9ba 100644 --- a/lib/actions/queue/PutMessage.js +++ b/lib/actions/queue/PutMessage.js @@ -1,33 +1,44 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - QueueMessagesListXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageListXmlModel, - QueueMessageXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageXmlModel, - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'), - N = 
require('./../../core/HttpHeaderNames'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + QueueMessagesListXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageListXmlModel, + QueueMessageXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageXmlModel, + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"), + N = require("./../../core/HttpHeaderNames"); class PutMessage { - constructor() { - } + constructor() {} - process(request, res) { - const { queue } = QueueManager.getQueueAndMessage({ queueName: request.queueName }); - const message = queue.put({ now: request.now, msg: request.payload.MessageText, visibilityTimeout: request.visibilityTimeout, messageTtl: request.messageTtl }); - const model = new QueueMessagesListXmlModel(); - model.add(new QueueMessageXmlModel( - { - messageId: message.messageId, - expirationTime: new Date(message.expirationTime*1000).toUTCString(), - insertionTime: new Date(message.insertionTime*1000).toUTCString(), - popReceipt: message.popReceipt, - timeNextVisible: new Date(message.timeNextVisible*1000).toUTCString() - })); - const xmlBody = model.toXml(); - const response = new AzuriteQueueResponse(); - response.addHttpProperty(N.CONTENT_TYPE, 'application/xml'); - res.set(response.httpProps); - res.status(201).send(xmlBody); - } + process(request, res) { + const { queue } = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }); + const message = queue.put({ + now: request.now, + msg: request.payload.MessageText, + visibilityTimeout: request.visibilityTimeout, + messageTtl: request.messageTtl, + }); + const model = new QueueMessagesListXmlModel(); + model.add( + new QueueMessageXmlModel({ + messageId: message.messageId, + expirationTime: new Date(message.expirationTime * 1000).toUTCString(), + insertionTime: new Date(message.insertionTime * 1000).toUTCString(), + popReceipt: message.popReceipt, + timeNextVisible: new 
Date(message.timeNextVisible * 1000).toUTCString(), + }) + ); + const xmlBody = model.toXml(); + const response = new AzuriteQueueResponse(); + response.addHttpProperty(N.CONTENT_TYPE, "application/xml"); + res.set(response.httpProps); + res.status(201).send(xmlBody); + } } -module.exports = new PutMessage(); \ No newline at end of file +module.exports = new PutMessage(); diff --git a/lib/actions/queue/SetQueueAcl.js b/lib/actions/queue/SetQueueAcl.js index 785fc6893..d8ee0d9e6 100644 --- a/lib/actions/queue/SetQueueAcl.js +++ b/lib/actions/queue/SetQueueAcl.js @@ -1,19 +1,22 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class SetQueueAcl { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({queueName: request.queueName}).queue; - queue.addAcl(request.payload.SignedIdentifier); - const response = new AzuriteQueueResponse(); - res.set(response.httpProps); - res.status(204).send(); - } + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue; + queue.addAcl(request.payload.SignedIdentifier); + const response = new AzuriteQueueResponse(); + res.set(response.httpProps); + res.status(204).send(); + } } -module.exports = new SetQueueAcl(); \ No newline at end of file +module.exports = new SetQueueAcl(); diff --git a/lib/actions/queue/SetQueueMetadata.js b/lib/actions/queue/SetQueueMetadata.js index eb646a93e..51c0fa207 100644 --- a/lib/actions/queue/SetQueueMetadata.js +++ b/lib/actions/queue/SetQueueMetadata.js @@ -1,18 +1,19 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - AzuriteQueueResponse = 
require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class SetQueueMetadata { - constructor() { - } + constructor() {} - process(request, res) { - QueueManager.setQueueMetadata(request); - const response = new AzuriteQueueResponse(); - res.set(response.httpProps); - res.status(204).send(); - } + process(request, res) { + QueueManager.setQueueMetadata(request); + const response = new AzuriteQueueResponse(); + res.set(response.httpProps); + res.status(204).send(); + } } -module.exports = new SetQueueMetadata(); \ No newline at end of file +module.exports = new SetQueueMetadata(); diff --git a/lib/actions/queue/UpdateMessage.js b/lib/actions/queue/UpdateMessage.js index b2580bc49..2b391e784 100644 --- a/lib/actions/queue/UpdateMessage.js +++ b/lib/actions/queue/UpdateMessage.js @@ -1,25 +1,38 @@ -'use strict'; +/** @format */ -const QueueManager = require('./../../core/queue/QueueManager'), - QueueMessagesListXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageListXmlModel, - QueueMessageXmlModel = require('./../../xml/queue/QueueMessageList').QueueMessageXmlModel, - N = require('./../../core/HttpHeaderNames'), - AzuriteQueueResponse = require('./../../model/queue/AzuriteQueueResponse'); +"use strict"; + +const QueueManager = require("./../../core/queue/QueueManager"), + QueueMessagesListXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageListXmlModel, + QueueMessageXmlModel = require("./../../xml/queue/QueueMessageList") + .QueueMessageXmlModel, + N = require("./../../core/HttpHeaderNames"), + AzuriteQueueResponse = require("./../../model/queue/AzuriteQueueResponse"); class UpdateMessage { - constructor() { - } + constructor() {} - process(request, res) { - const queue = QueueManager.getQueueAndMessage({ queueName: request.queueName }).queue, - message = queue.update({ 
messageId: request.messageId, popReceipt: request.popReceipt, visibilityTimeout: request.visibilityTimeout, msg: request.payload.MessageText }), - response = new AzuriteQueueResponse(); + process(request, res) { + const queue = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + }).queue, + message = queue.update({ + messageId: request.messageId, + popReceipt: request.popReceipt, + visibilityTimeout: request.visibilityTimeout, + msg: request.payload.MessageText, + }), + response = new AzuriteQueueResponse(); - response.addHttpProperty(N.POP_RECEIPT, message.popReceipt); - response.addHttpProperty(N.VISIBLE_NEXT_TIME, new Date(message.timeNextVisible * 1000).toUTCString()); - res.set(response.httpProps); - res.status(204).send(); - } + response.addHttpProperty(N.POP_RECEIPT, message.popReceipt); + response.addHttpProperty( + N.VISIBLE_NEXT_TIME, + new Date(message.timeNextVisible * 1000).toUTCString() + ); + res.set(response.httpProps); + res.status(204).send(); + } } -module.exports = new UpdateMessage(); \ No newline at end of file +module.exports = new UpdateMessage(); diff --git a/lib/actions/table/CreateTable.js b/lib/actions/table/CreateTable.js index 2f53d7db5..cb16ad08d 100644 --- a/lib/actions/table/CreateTable.js +++ b/lib/actions/table/CreateTable.js @@ -1,26 +1,26 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + N = require("./../../core/HttpHeaderNames"); class CreateTable { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.createTable(request) - .then((response) => { - res.set(request.httpProps); - if (request.httpProps[N.PREFER] === 
'return-no-content') { - response.addHttpProperty(N.PREFERENCE_APPLIED, 'return-no-content'); - res.status(204).send(); - return; - } - response.addHttpProperty(N.PREFERENCE_APPLIED, 'return-content'); - res.status(201).send(response.proxy.odata(request.accept)); - }); - } + process(request, res) { + tableStorageManager.createTable(request).then((response) => { + res.set(request.httpProps); + if (request.httpProps[N.PREFER] === "return-no-content") { + response.addHttpProperty(N.PREFERENCE_APPLIED, "return-no-content"); + res.status(204).send(); + return; + } + response.addHttpProperty(N.PREFERENCE_APPLIED, "return-content"); + res.status(201).send(response.proxy.odata(request.accept)); + }); + } } -module.exports = new CreateTable(); \ No newline at end of file +module.exports = new CreateTable(); diff --git a/lib/actions/table/DeleteEntity.js b/lib/actions/table/DeleteEntity.js index 63637e4b2..38d0a5b3e 100644 --- a/lib/actions/table/DeleteEntity.js +++ b/lib/actions/table/DeleteEntity.js @@ -1,20 +1,20 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + N = require("./../../core/HttpHeaderNames"); class DeleteEntity { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.deleteEntity(request) - .then((response) => { - res.set(request.httpProps); - res.status(204).send(); - }); - } + process(request, res) { + tableStorageManager.deleteEntity(request).then((response) => { + res.set(request.httpProps); + res.status(204).send(); + }); + } } -module.exports = new DeleteEntity; \ No newline at end of file +module.exports = new DeleteEntity(); diff --git 
a/lib/actions/table/DeleteTable.js b/lib/actions/table/DeleteTable.js index 9aec31dd6..7fa2ad720 100644 --- a/lib/actions/table/DeleteTable.js +++ b/lib/actions/table/DeleteTable.js @@ -1,20 +1,20 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + N = require("./../../core/HttpHeaderNames"); class DeleteTable { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.deleteTable(request) - .then((response) => { - res.set(request.httpProps); - res.status(201).send(); - }); - } + process(request, res) { + tableStorageManager.deleteTable(request).then((response) => { + res.set(request.httpProps); + res.status(201).send(); + }); + } } -module.exports = new DeleteTable; \ No newline at end of file +module.exports = new DeleteTable(); diff --git a/lib/actions/table/InsertEntity.js b/lib/actions/table/InsertEntity.js index a7a253a75..ead97abff 100644 --- a/lib/actions/table/InsertEntity.js +++ b/lib/actions/table/InsertEntity.js @@ -1,28 +1,40 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + N = require("./../../core/HttpHeaderNames"); +// see: https://docs.microsoft.com/en-us/rest/api/storageservices/insert-entity class InsertEntity { - constructor() { - } + constructor() {} - process(request, res) { - 
tableStorageManager.insertEntity(request) - .then((response) => { - if (request.httpProps[N.PREFER] === 'return-no-content') { - response.addHttpProperty(N.PREFERENCE_APPLIED, 'return-no-content'); - res.status(204).send(); - return; - } - const payload = Object.assign({}, response.proxy.odata(request.accept), response.proxy.attribs(request.accept)); - response.addHttpProperty(N.ETAG, response.proxy.etag); - response.addHttpProperty(N.PREFERENCE_APPLIED, 'return-content'); - res.set(response.httpProps); - res.status(201).send(payload); - }); - } + process(request, res) { + tableStorageManager.insertEntity(request).then((response) => { + // If the request includes the Prefer header with the value + // return-no-content, no response body is returned. + // Otherwise, the response body is an OData entity set. + if (request.httpProps[N.PREFER] === "return-no-content") { + response.addHttpProperty(N.PREFERENCE_APPLIED, "return-no-content"); + const payload = Object.assign({ + ".metadata": { + etag: response.proxy.etag, + }, + }); + res.status(204).send(payload); + } else { + const payload = Object.assign( + {}, + response.proxy.odata(request.accept), + response.proxy.attribs(request.accept) + ); + response.addHttpProperty(N.ETAG, response.proxy.etag); + response.addHttpProperty(N.PREFERENCE_APPLIED, "return-content"); + res.set(response.httpProps); + res.status(201).send(payload); + } + }); + } } -module.exports = new InsertEntity; \ No newline at end of file +module.exports = new InsertEntity(); diff --git a/lib/actions/table/InsertOrMergeEntity.js b/lib/actions/table/InsertOrMergeEntity.js index ae120d38d..a77eea5a2 100644 --- a/lib/actions/table/InsertOrMergeEntity.js +++ b/lib/actions/table/InsertOrMergeEntity.js @@ -1,21 +1,21 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - N = require('./../../core/HttpHeaderNames'); 
+"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + N = require("./../../core/HttpHeaderNames"); class InsertOrMergeEntity { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.insertOrMergeEntity(request) - .then((response) => { - response.addHttpProperty(N.ETAG, response.proxy.etag); - res.set(response.httpProps); - res.status(204).send(); - }); - } + process(request, res) { + tableStorageManager.insertOrMergeEntity(request).then((response) => { + response.addHttpProperty(N.ETAG, response.proxy.etag); + res.set(response.httpProps); + res.status(204).send(); + }); + } } -module.exports = new InsertOrMergeEntity; \ No newline at end of file +module.exports = new InsertOrMergeEntity(); diff --git a/lib/actions/table/InsertOrReplaceEntity.js b/lib/actions/table/InsertOrReplaceEntity.js index 7b1d6df4f..56fdc0c3e 100644 --- a/lib/actions/table/InsertOrReplaceEntity.js +++ b/lib/actions/table/InsertOrReplaceEntity.js @@ -1,21 +1,21 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + N = require("./../../core/HttpHeaderNames"); class InsertOrReplaceEntity { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.insertEntity(request) - .then((response) => { - response.addHttpProperty(N.ETAG, response.proxy.etag); - res.set(response.httpProps); - res.status(204).send(); - }); - } + process(request, res) { + tableStorageManager.insertEntity(request).then((response) => { + response.addHttpProperty(N.ETAG, 
response.proxy.etag); + res.set(response.httpProps); + res.status(204).send(); + }); + } } -module.exports = new InsertOrReplaceEntity; \ No newline at end of file +module.exports = new InsertOrReplaceEntity(); diff --git a/lib/actions/table/MergeEntity.js b/lib/actions/table/MergeEntity.js index fa51a51aa..b49b4d8a4 100644 --- a/lib/actions/table/MergeEntity.js +++ b/lib/actions/table/MergeEntity.js @@ -1,22 +1,22 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - ODataMode = require('./../../core/Constants').ODataMode, - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + ODataMode = require("./../../core/Constants").ODataMode, + N = require("./../../core/HttpHeaderNames"); class MergeEntity { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.mergeEntity(request) - .then((response) => { - response.addHttpProperty(N.ETAG, response.proxy.etag); - res.set(response.httpProps); - res.status(204).send(); - }); - } + process(request, res) { + tableStorageManager.mergeEntity(request).then((response) => { + response.addHttpProperty(N.ETAG, response.proxy.etag); + res.set(response.httpProps); + res.status(204).send(); + }); + } } -module.exports = new MergeEntity; \ No newline at end of file +module.exports = new MergeEntity(); diff --git a/lib/actions/table/QueryEntities.js b/lib/actions/table/QueryEntities.js index 15f87e289..b0dca281e 100644 --- a/lib/actions/table/QueryEntities.js +++ b/lib/actions/table/QueryEntities.js @@ -1,44 +1,63 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - ODataMode = require('./../../core/Constants').ODataMode, - 
tableStorageManager = require('./../../core/table/TableStorageManager'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + ODataMode = require("./../../core/Constants").ODataMode, + tableStorageManager = require("./../../core/table/TableStorageManager"); class QueryEntities { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.queryEntities(request) - .then((response) => { - res.set(response.httpProps); - const payload = this._createResponsePayload(response.payload, request.tableName, request.accept); - res.status(200).send(payload); - }); - } + process(request, res) { + tableStorageManager.queryEntities(request).then((response) => { + res.set(response.httpProps); + const payload = this._createResponsePayload( + response.payload, + request.tableName, + request.accept, + request.singleEntity + ); + res.status(200).send(payload); + }); + } - _createResponsePayload(payload, tableName, accept) { - const response = {}; - if (accept !== ODataMode.NONE) { - response['odata.metadata'] = `http://127.0.0.1:10002/devstoreaccount1/$metadata#${tableName}`; - } - response.value = []; - let i = 0; - for (const item of payload) { - response.value.push(item.attribs(accept)); - response.value[i]['PartitionKey'] = item.partitionKey; - response.value[i]['RowKey'] = item.rowKey; - if (accept === ODataMode.FULL) { - const odataItems = item.odata(accept); - for (const key of odataItems) { - response.value[i][key] = odataItems[key]; - } - delete response.value[i]['odata.metadata']; - } - ++i; - } - return response; + _createResponsePayload(payload, tableName, accept, singleEntity) { + let response = {}; + + if (accept !== ODataMode.NONE) { + response[ + "odata.metadata" + ] = `http://127.0.0.1:10002/devstoreaccount1/$metadata#${tableName}`; + } + // case where we do not have an array + if (singleEntity) { + for (const item of payload) { + response["PartitionKey"] = item.partitionKey; + 
response["RowKey"] = item.rowKey; + response = Object.assign({}, response, item.attribs(accept)); + } + } else if (payload.length === 1) { + response["PartitionKey"] = payload[0].partitionKey; + response["RowKey"] = payload[0].rowKey; + for (const attrib in payload[0].attribs(accept)) { + response[attrib] = payload[0]._.attribs[attrib]; + } + } else if (payload.length > 1) { + response.value = []; + let i = 0; + for (const item of payload) { + response.value.push(item.attribs(accept)); + response.value[i]["PartitionKey"] = item.partitionKey; + response.value[i]["RowKey"] = item.rowKey; + // content is determined by the odata format + // https://docs.microsoft.com/en-us/rest/api/storageservices/payload-format-for-table-service-operations + ++i; + } } + + return response; + } } -module.exports = new QueryEntities(); \ No newline at end of file +module.exports = new QueryEntities(); diff --git a/lib/actions/table/QueryTable.js b/lib/actions/table/QueryTable.js index 563912c16..69df0dda0 100644 --- a/lib/actions/table/QueryTable.js +++ b/lib/actions/table/QueryTable.js @@ -1,37 +1,42 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - ODataMode = require('./../../core/Constants').ODataMode, - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + ODataMode = require("./../../core/Constants").ODataMode, + N = require("./../../core/HttpHeaderNames"); class QueryTable { - constructor() { - } + constructor() {} - process(request, res) { - tableStorageManager.queryTable(request) - .then((response) => { - res.set(response.httpProps); - const payload = this._createResponsePayload(response.payload, request.accept); - res.status(200).send(payload); - }); - } + 
process(request, res) { + tableStorageManager.queryTable(request).then((response) => { + res.set(response.httpProps); + const payload = this._createResponsePayload( + response.payload, + request.accept + ); + res.status(200).send(payload); + }); + } - _createResponsePayload(payload, accept) { - const response = {}; - if (accept !== ODataMode.NONE) { - response['odata.metadata'] = `http://127.0.0.1:10002/devstoreaccount1/$metadata#Tables`; - } - response.value = []; - let i = 0; - for (const item of payload) { - response.value.push(item.odata(accept)) - delete response.value[i]['odata.metadata']; - ++i; - } - return response; + _createResponsePayload(payload, accept) { + const response = {}; + if (accept !== ODataMode.NONE) { + response[ + "odata.metadata" + ] = `http://127.0.0.1:10002/devstoreaccount1/$metadata#Tables`; + } + response.value = []; + let i = 0; + for (const item of payload) { + response.value.push(item.odata(accept)); + delete response.value[i]["odata.metadata"]; + ++i; } + return response; + } } -module.exports = new QueryTable(); \ No newline at end of file +module.exports = new QueryTable(); diff --git a/lib/actions/table/UpdateEntity.js b/lib/actions/table/UpdateEntity.js index dd7e70373..09b9d00be 100644 --- a/lib/actions/table/UpdateEntity.js +++ b/lib/actions/table/UpdateEntity.js @@ -1,21 +1,21 @@ -'use strict'; +/** @format */ -const AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - tableStorageManager = require('./../../core/table/TableStorageManager'), - ODataMode = require('./../../core/Constants').ODataMode, - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + tableStorageManager = require("./../../core/table/TableStorageManager"), + ODataMode = require("./../../core/Constants").ODataMode, + N = require("./../../core/HttpHeaderNames"); class UpdateEntity { - constructor() { - } + constructor() {} - process(request, 
res) { - tableStorageManager.updateEntity(request) - .then((response) => { - res.set(response.httpProps); - res.status(204).send(); - }); - } + process(request, res) { + tableStorageManager.updateEntity(request).then((response) => { + res.set(response.httpProps); + res.status(204).send(); + }); + } } -module.exports = new UpdateEntity; \ No newline at end of file +module.exports = new UpdateEntity(); diff --git a/lib/core/AzuriteError.js b/lib/core/AzuriteError.js index 0ef367d41..c377d2d6c 100644 --- a/lib/core/AzuriteError.js +++ b/lib/core/AzuriteError.js @@ -1,16 +1,18 @@ -'use strict'; +/** @format */ + +"use strict"; function generateErrorMessage(errorCode, userMessage) { - return `${errorCode}${userMessage}`; + return `${errorCode}${userMessage}`; } class AzuriteError extends Error { - constructor(e) { - super(e.errorCode); - this.message = generateErrorMessage(e.errorCode, e.userMessage || ""); - this.statusCode = e.httpErrorCode; - Error.captureStackTrace(this, this.constructor); - } + constructor(e) { + super(e.errorCode); + this.message = generateErrorMessage(e.errorCode, e.userMessage || ""); + this.statusCode = e.httpErrorCode; + Error.captureStackTrace(this, this.constructor); + } } -module.exports = AzuriteError; \ No newline at end of file +module.exports = AzuriteError; diff --git a/lib/core/Constants.js b/lib/core/Constants.js index 696331daa..327e32257 100644 --- a/lib/core/Constants.js +++ b/lib/core/Constants.js @@ -1,173 +1,175 @@ -'use strict'; +/** @format */ + +"use strict"; const StorageTables = { - Containers: 'Containers', - Commits: 'Commmits', - Pages: 'Pages', - ServiceProperties: 'ServiceProperties' -} + Containers: "Containers", + Commits: "Commmits", + Pages: "Pages", + ServiceProperties: "ServiceProperties", +}; const TableStorageTables = { - Tables: 'Tables' -} + Tables: "Tables", +}; const StorageEntityType = { - Container: 'Container', - BlockBlob: 'BlockBlob', - AppendBlob: 'AppendBlob', - PageBlob: 'PageBlob' -} + Container: 
"Container", + BlockBlob: "BlockBlob", + AppendBlob: "AppendBlob", + PageBlob: "PageBlob", +}; const Usage = { - Read: 1, - Write: 2, - Delete: 4, - Other: 8 -} + Read: 1, + Write: 2, + Delete: 4, + Other: 8, +}; const LeaseStatus = { - AVAILABLE: 'available', - BROKEN: 'broken', - BREAKING: 'breaking', - LEASED: 'leased', - EXPIRED: 'expired' -} + AVAILABLE: "available", + BROKEN: "broken", + BREAKING: "breaking", + LEASED: "leased", + EXPIRED: "expired", +}; const LeaseActions = { - ACQUIRE: 'acquire', - RENEW: 'renew', - CHANGE: 'change', - RELEASE: 'release', - BREAK: 'break' -} + ACQUIRE: "acquire", + RENEW: "renew", + CHANGE: "change", + RELEASE: "release", + BREAK: "break", +}; const BlockListType = { - COMMITTED: 'committed', - UNCOMMITTED: 'uncommitted', - ALL: 'all' -} + COMMITTED: "committed", + UNCOMMITTED: "uncommitted", + ALL: "all", +}; const Operations = { - Undefined: 'Undefined', - Account: { - LIST_CONTAINERS: 'ListContainers', - SET_BLOB_SERVICE_PROPERTIES: 'SetBlobServiceProperties', - GET_BLOB_SERVICE_PROPERTIES: 'GetBlobServiceProperties', - PREFLIGHT_BLOB_REQUEST: 'PreflightBlobRequest' - }, - Container: { - CREATE_CONTAINER: 'CreateContainer', - GET_CONTAINER_PROPERTIES: 'GetContainerProperties', - GET_CONTAINER_METADATA: 'GetContainerMetadata', - SET_CONTAINER_METADATA: 'SetContainerMetadata', - GET_CONTAINER_ACL: 'GetContainerAcl', - SET_CONTAINER_ACL: 'SetContainerAcl', - DELETE_CONTAINER: 'DeleteContainer', - LEASE_CONTAINER: 'LeaseContainer', - LIST_BLOBS: 'ListBlobs' - }, - Blob: { - PUT_BLOB: 'PutBlob', - GET_BLOB: 'GetBlob', - GET_BLOB_PROPERTIES: 'GetBlobProperties', - SET_BLOB_PROPERTIES: 'SetBlobProperties', - GET_BLOB_METADATA: 'GetBlobMetadata', - SET_BLOB_METADATA: 'SetBlobMetadata', - LEASE_BLOB: 'LeaseBlob', - SNAPSHOT_BLOB: 'SnapshotBlob', - COPY_BLOB: 'CopyBlob', - ABORT_COPY_BLOB: 'AbortCopyBlob', - DELETE_BLOB: 'DeleteBlob', - SET_BLOB_TIER: 'SetBlobTier', - PUT_BLOCK: 'PutBlock', - PUT_BLOCK_LIST: 'PutBlockList', - 
GET_BLOCK_LIST: 'GetBlockList', - PUT_PAGE: 'PutPage', - GET_PAGE_RANGES: 'GetPageRanges', - INCREMENTAL_COPY_BLOB: 'IncrementalCopyBlob', - APPEND_BLOCK: 'AppendBlock', - COPY_BLOB: 'CopyBlob', - ABORT_COPY_BLOB: 'AbortCopyBlob' - }, - Queue: { - CREATE_QUEUE: 'CreateQueue', - DELETE_QUEUE: 'DeleteQueue', - SET_QUEUE_METADATA: 'SetQueueMetadata', - GET_QUEUE_METADATA: 'GetQueueMetadata', - PUT_MESSAGE: 'PutMessage', - GET_MESSAGE: 'GetMessage', - CLEAR_MESSAGES: 'ClearMessages', - PEEK_MESSAGES: 'PeekMessages', - DELETE_MESSAGE: 'DeleteMessage', - UPDATE_MESSAGE: 'UpdateMessage', - LIST_QUEUES: 'ListQueues', - SET_QUEUE_ACL: 'SetQueueAcl', - GET_QUEUE_ACL: 'GetQueueAcl' - }, - Table: { - CREATE_TABLE: 'CreateTable', - INSERT_ENTITY: 'InsertEntity', - DELETE_TABLE: 'DeleteTable', - DELETE_ENTITY: 'DeleteEntity', - QUERY_TABLE: 'QueryTable', - QUERY_ENTITY: 'QueryEntity', - UPDATE_ENTITY: 'UpdateEntity', - INSERT_OR_REPLACE_ENTITY: 'InsertOrReplaceEntity', - MERGE_ENTITY: 'MergeEntity', - INSERT_OR_MERGE_ENTITY: 'InsertOrMergeEntity' - } -} + Undefined: "Undefined", + Account: { + LIST_CONTAINERS: "ListContainers", + SET_BLOB_SERVICE_PROPERTIES: "SetBlobServiceProperties", + GET_BLOB_SERVICE_PROPERTIES: "GetBlobServiceProperties", + PREFLIGHT_BLOB_REQUEST: "PreflightBlobRequest", + }, + Container: { + CREATE_CONTAINER: "CreateContainer", + GET_CONTAINER_PROPERTIES: "GetContainerProperties", + GET_CONTAINER_METADATA: "GetContainerMetadata", + SET_CONTAINER_METADATA: "SetContainerMetadata", + GET_CONTAINER_ACL: "GetContainerAcl", + SET_CONTAINER_ACL: "SetContainerAcl", + DELETE_CONTAINER: "DeleteContainer", + LEASE_CONTAINER: "LeaseContainer", + LIST_BLOBS: "ListBlobs", + }, + Blob: { + PUT_BLOB: "PutBlob", + GET_BLOB: "GetBlob", + GET_BLOB_PROPERTIES: "GetBlobProperties", + SET_BLOB_PROPERTIES: "SetBlobProperties", + GET_BLOB_METADATA: "GetBlobMetadata", + SET_BLOB_METADATA: "SetBlobMetadata", + LEASE_BLOB: "LeaseBlob", + SNAPSHOT_BLOB: "SnapshotBlob", + COPY_BLOB: 
"CopyBlob", + ABORT_COPY_BLOB: "AbortCopyBlob", + DELETE_BLOB: "DeleteBlob", + SET_BLOB_TIER: "SetBlobTier", + PUT_BLOCK: "PutBlock", + PUT_BLOCK_LIST: "PutBlockList", + GET_BLOCK_LIST: "GetBlockList", + PUT_PAGE: "PutPage", + GET_PAGE_RANGES: "GetPageRanges", + INCREMENTAL_COPY_BLOB: "IncrementalCopyBlob", + APPEND_BLOCK: "AppendBlock", + COPY_BLOB: "CopyBlob", + ABORT_COPY_BLOB: "AbortCopyBlob", + }, + Queue: { + CREATE_QUEUE: "CreateQueue", + DELETE_QUEUE: "DeleteQueue", + SET_QUEUE_METADATA: "SetQueueMetadata", + GET_QUEUE_METADATA: "GetQueueMetadata", + PUT_MESSAGE: "PutMessage", + GET_MESSAGE: "GetMessage", + CLEAR_MESSAGES: "ClearMessages", + PEEK_MESSAGES: "PeekMessages", + DELETE_MESSAGE: "DeleteMessage", + UPDATE_MESSAGE: "UpdateMessage", + LIST_QUEUES: "ListQueues", + SET_QUEUE_ACL: "SetQueueAcl", + GET_QUEUE_ACL: "GetQueueAcl", + }, + Table: { + CREATE_TABLE: "CreateTable", + INSERT_ENTITY: "InsertEntity", + DELETE_TABLE: "DeleteTable", + DELETE_ENTITY: "DeleteEntity", + QUERY_TABLE: "QueryTable", + QUERY_ENTITY: "QueryEntity", + UPDATE_ENTITY: "UpdateEntity", + INSERT_OR_REPLACE_ENTITY: "InsertOrReplaceEntity", + MERGE_ENTITY: "MergeEntity", + INSERT_OR_MERGE_ENTITY: "InsertOrMergeEntity", + }, +}; const CopyStatus = { - PENDING: 'pending', - SUCCESS: 'success', - FAILED: 'failed', - ABORTED: 'aborted' -} + PENDING: "pending", + SUCCESS: "success", + FAILED: "failed", + ABORTED: "aborted", +}; // See allowed operations below in comments const ServiceSAS = { - Blob: { - // Read the content, properties, metadata or block list of any blob in the container. Use any blob in the container as the source of a copy operation. - READ: 'r', - // Add a block to any append blob in the container. - ADD: 'a', - // Write a new blob to the container, snapshot any blob in the container, or copy a blob to a new blob in the container. - CREATE: 'c', - // For any blob in the container, create or write content, properties, metadata, or block list. 
Snapshot or lease the blob. - // Resize the blob (page blob only). Use the blob as the destination of a copy operation. Note: You cannot grant permissions - // to read or write container properties or metadata, nor to lease a container, with a service SAS. Use an account SAS instead. - WRITE: 'w', - // Delete any blob in the container. Note: You cannot grant permissions to delete a container with a service SAS. Use an account SAS instead. - DELETE: 'd', - // List blobs in the container. - LIST: 'l' - } -} + Blob: { + // Read the content, properties, metadata or block list of any blob in the container. Use any blob in the container as the source of a copy operation. + READ: "r", + // Add a block to any append blob in the container. + ADD: "a", + // Write a new blob to the container, snapshot any blob in the container, or copy a blob to a new blob in the container. + CREATE: "c", + // For any blob in the container, create or write content, properties, metadata, or block list. Snapshot or lease the blob. + // Resize the blob (page blob only). Use the blob as the destination of a copy operation. Note: You cannot grant permissions + // to read or write container properties or metadata, nor to lease a container, with a service SAS. Use an account SAS instead. + WRITE: "w", + // Delete any blob in the container. Note: You cannot grant permissions to delete a container with a service SAS. Use an account SAS instead. + DELETE: "d", + // List blobs in the container. 
+ LIST: "l", + }, +}; const _key = `Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==`; const Keys = { - AccessKey: _key, - DecodedAccessKey: Buffer.from(_key, 'base64') -} + AccessKey: _key, + DecodedAccessKey: Buffer.from(_key, "base64"), +}; const ODataMode = { - NONE: 'nometadata', - MINIMAL: 'minimalmetadata', - FULL: 'fullmetadata' -} + NONE: "nometadata", + MINIMAL: "minimalmetadata", + FULL: "fullmetadata", +}; module.exports = { - StorageTables: StorageTables, - StorageEntityType: StorageEntityType, - LeaseStatus: LeaseStatus, - LeaseActions: LeaseActions, - Usage: Usage, - Operations: Operations, - CopyStatus: CopyStatus, - BlockListType: BlockListType, - ServiceSAS: ServiceSAS, - Keys: Keys, - ODataMode: ODataMode, - TableStorageTables: TableStorageTables -} \ No newline at end of file + StorageTables: StorageTables, + StorageEntityType: StorageEntityType, + LeaseStatus: LeaseStatus, + LeaseActions: LeaseActions, + Usage: Usage, + Operations: Operations, + CopyStatus: CopyStatus, + BlockListType: BlockListType, + ServiceSAS: ServiceSAS, + Keys: Keys, + ODataMode: ODataMode, + TableStorageTables: TableStorageTables, +}; diff --git a/lib/core/ErrorCodes.js b/lib/core/ErrorCodes.js index c8dc2a470..eb82ce260 100644 --- a/lib/core/ErrorCodes.js +++ b/lib/core/ErrorCodes.js @@ -1,81 +1,392 @@ -'use strict'; +/** @format */ + +"use strict"; /** - * Common and Blob Error codes as specified at + * Common and Blob Error codes as specified at * https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/status-and-error-codes2 */ class ErrorCode { - constructor(errorCode, httpErrorCode, userMessage) { - this.errorCode = errorCode; - this.httpErrorCode = httpErrorCode; - this.userMessage = userMessage; - } + constructor(errorCode, httpErrorCode, userMessage) { + this.errorCode = errorCode; + this.httpErrorCode = httpErrorCode; + this.userMessage = userMessage; + } } module.exports = { - // GENERAL - InvalidXml: new 
ErrorCode('Invalid XML.', 400, 'One of the XML nodes specified in the request body is not supported.'), - InvalidXmlRequest: new ErrorCode('InvalidXmlRequest', 400, 'The request body’s XML was invalid or not correctly specified.'), - AuthenticationFailed: new ErrorCode('AuthenticationFailed', 403, 'Server failed to authenticate the request. Make sure the value of the Authorization header is formed correctly including the signature.'), - AuthorizationPermissionMismatch: new ErrorCode('AuthorizationPermissionMismatch', 403, 'This request is not authorized to perform this operation using this permission.'), - AuthorizationResourceTypeMismatch: new ErrorCode('AuthorizationResourceTypeMismatch', 403, 'This request is not authorized to perform this operation using this resource type.'), - CorsForbidden: new ErrorCode('Forbidden', 403, 'CORS validation failed.'), - MissingRequiredHeader: new ErrorCode('MissingRequiredHeader', 400, 'A required HTTP header was not specified.'), - ResourceNotFound: new ErrorCode('ResourceNotFound', 404, 'The specified resource does not exist.'), + // GENERAL + InvalidXml: new ErrorCode( + "Invalid XML.", + 400, + "One of the XML nodes specified in the request body is not supported." + ), + InvalidXmlRequest: new ErrorCode( + "InvalidXmlRequest", + 400, + "The request body’s XML was invalid or not correctly specified." + ), + AuthenticationFailed: new ErrorCode( + "AuthenticationFailed", + 403, + "Server failed to authenticate the request. Make sure the value of the Authorization header is formed correctly including the signature." + ), + AuthorizationPermissionMismatch: new ErrorCode( + "AuthorizationPermissionMismatch", + 403, + "This request is not authorized to perform this operation using this permission." + ), + AuthorizationResourceTypeMismatch: new ErrorCode( + "AuthorizationResourceTypeMismatch", + 403, + "This request is not authorized to perform this operation using this resource type." 
+ ), + CorsForbidden: new ErrorCode("Forbidden", 403, "CORS validation failed."), + MissingRequiredHeader: new ErrorCode( + "MissingRequiredHeader", + 400, + "A required HTTP header was not specified." + ), + ResourceNotFound: new ErrorCode( + "ResourceNotFound", + 404, + "The specified resource does not exist." + ), - // BLOB - ContainerNotFound: new ErrorCode('ContainerNotFound', 404, 'The specified container does not exist.'), - ContainerAlreadyExists: new ErrorCode('ContainerAlreadyExists', 409, 'The specified container already exists.'), - InvalidHeaderValue: new ErrorCode('InvalidHeaderValue', 400, 'The value provided for one of the HTTP headers was not in the correct format.'), - InvalidInput: new ErrorCode('InvalidInput', 400, 'One of the request inputs is not valid.'), - InvalidPageRange: new ErrorCode('InvalidPageRange', 416, 'The page range specified is invalid.'), - MissingContentLengthHeader: new ErrorCode('MissingContentLengthHeader', 411, 'The Content-Length header was not specified.'), - Md5Mismatch: new ErrorCode('Md5Mismatch', 400, 'The MD5 value specified in the request did not match the MD5 value calculated by the server.'), - PreconditionFailed: new ErrorCode('PreconditionFailed', 412, 'One of the XML nodes specified in the request body is not supported.'), - BlockCountExceedsLimit: new ErrorCode('BlockCountExceedsLimit', 409, 'The committed block count cannot exceed the maximum limit of 50,000 blocks.'), - InvalidBlobType: new ErrorCode('InvalidBlobType', 409, 'The blob type is invalid for this operation.'), - RequestBodyTooLarge: new ErrorCode('RequestBodyTooLarge', 413, 'The size of the request body exceeds the maximum size permitted.'), - BlobNotFound: new ErrorCode('BlobNotFound', 404, 'The specified blob does not exist.'), - BlobAlreadyExists: new ErrorCode('BlobAlreadyExists', 409, 'The specified blob already exists.'), - UnsupportedHeader: new ErrorCode('UnsupportedHeader', 400, 'One of the headers specified in the request is not 
supported.'), - UnsupportedBlobType: new ErrorCode('UnsupportedBlobType', 400, 'The blob type is invalid for this operation.'), - SnapshotsPresent: new ErrorCode('SnapshotsPresent', 409, 'This operation is not permitted while the blob has snapshots.'), - LeaseNotPresentWithLeaseOperation: new ErrorCode('LeaseNotPresentWithLeaseOperation', 409, 'There is currently no lease on the blob/container.'), - LeaseIdMismatchWithLeaseOperation: new ErrorCode('LeaseIdMismatchWithLeaseOperation', 409, 'The lease ID specified did not match the lease ID for the blob/container.'), - LeaseAlreadyPresent: new ErrorCode('LeaseAlreadyPresent', 409, 'There is already a lease present.'), - LeaseIsBreakingAndCannotBeChanged: new ErrorCode('LeaseIsBreakingAndCannotBeChanged', 409, 'The lease ID matched, but the lease is currently in breaking state and cannot be changed.'), - LeaseIsBreakingAndCannotBeAcquired: new ErrorCode('LeaseIsBreakingAndCannotBeAcquired', 409, 'The lease ID matched, but the lease is currently in breaking state and cannot be acquired until it is broken.'), - LeaseIsBrokenAndCannotBeRenewed: new ErrorCode('LeaseIsBrokenAndCannotBeRenewed', 409, 'The lease ID matched, but the lease has been broken explicitly and cannot be renewed.'), - LeaseNotPresentWithContainerOperation: new ErrorCode('LeaseNotPresentWithContainerOperation', 412, 'There is currently no lease on the container.'), - LeaseNotPresentWithBlobOperation: new ErrorCode('LeaseNotPresentWithBlobOperation', 412, 'There is currently no lease on the blob.'), - LeaseIdMissing: new ErrorCode('LeaseIdMissing', 412, 'There is currently a lease on the blob/container and no lease ID was specified in the request.'), - LeaseIdMismatchWithContainerOperation: new ErrorCode('LeaseIdMismatchWithContainerOperation', 412, 'The lease ID specified did not match the lease ID for the container.'), - LeaseIdMismatchWithBlobOperation: new ErrorCode('LeaseIdMismatchWithBlobOperation', 412, 'The lease ID specified did not match the 
lease ID for the blob.'), - ConditionNotMetWrite: new ErrorCode('ConditionNotMet', 412, 'The condition specified in the conditional header(s) was not met for a write operation.'), - ConditionNotMetRead: new ErrorCode('ConditionNotMet', 304, 'The condition specified in the conditional header(s) was not met for a read operation.'), - MaxBlobSizeConditionNotMet: new ErrorCode('MaxBlobSizeConditionNotMet', 412, 'The max blob size condition specified was not met.'), - AppendPositionConditionNotMet: new ErrorCode('AppendPositionConditionNotMet', 412, 'The append position condition specified was not met.'), - InvalidRange: new ErrorCode('InvalidRange', 416, 'The range specified is invalid for the current size of the resource.'), - InternalError: new ErrorCode('InternalError', 500, 'The server encountered an internal error. Please retry the request.'), - PendingCopyOperation: new ErrorCode('PendingCopyOperation', 409, 'There is currently a pending copy operation.'), - NoPendingCopyOperation: new ErrorCode('NoPendingCopyOperation', 409, 'There is currently no pending copy operation.'), - InvalidBlockList: new ErrorCode('InvalidBlockList', 400, 'The specified block list is invalid.'), - InvalidResourceName: new ErrorCode('InvalidResourceName', 400, 'The specifed resource name contains invalid characters.'), + // BLOB + ContainerNotFound: new ErrorCode( + "ContainerNotFound", + 404, + "The specified container does not exist." + ), + ContainerAlreadyExists: new ErrorCode( + "ContainerAlreadyExists", + 409, + "The specified container already exists." + ), + InvalidHeaderValue: new ErrorCode( + "InvalidHeaderValue", + 400, + "The value provided for one of the HTTP headers was not in the correct format." + ), + InvalidInput: new ErrorCode( + "InvalidInput", + 400, + "One of the request inputs is not valid." + ), + InvalidPageRange: new ErrorCode( + "InvalidPageRange", + 416, + "The page range specified is invalid." 
+ ), + MissingContentLengthHeader: new ErrorCode( + "MissingContentLengthHeader", + 411, + "The Content-Length header was not specified." + ), + Md5Mismatch: new ErrorCode( + "Md5Mismatch", + 400, + "The MD5 value specified in the request did not match the MD5 value calculated by the server." + ), + PreconditionFailed: new ErrorCode( + "PreconditionFailed", + 412, + "One of the XML nodes specified in the request body is not supported." + ), + BlockCountExceedsLimit: new ErrorCode( + "BlockCountExceedsLimit", + 409, + "The committed block count cannot exceed the maximum limit of 50,000 blocks." + ), + InvalidBlobType: new ErrorCode( + "InvalidBlobType", + 409, + "The blob type is invalid for this operation." + ), + RequestBodyTooLarge: new ErrorCode( + "RequestBodyTooLarge", + 413, + "The size of the request body exceeds the maximum size permitted." + ), + BlobNotFound: new ErrorCode( + "BlobNotFound", + 404, + "The specified blob does not exist." + ), + BlobAlreadyExists: new ErrorCode( + "BlobAlreadyExists", + 409, + "The specified blob already exists." + ), + UnsupportedHeader: new ErrorCode( + "UnsupportedHeader", + 400, + "One of the headers specified in the request is not supported." + ), + UnsupportedBlobType: new ErrorCode( + "UnsupportedBlobType", + 400, + "The blob type is invalid for this operation." + ), + SnapshotsPresent: new ErrorCode( + "SnapshotsPresent", + 409, + "This operation is not permitted while the blob has snapshots." + ), + LeaseNotPresentWithLeaseOperation: new ErrorCode( + "LeaseNotPresentWithLeaseOperation", + 409, + "There is currently no lease on the blob/container." + ), + LeaseIdMismatchWithLeaseOperation: new ErrorCode( + "LeaseIdMismatchWithLeaseOperation", + 409, + "The lease ID specified did not match the lease ID for the blob/container." + ), + LeaseAlreadyPresent: new ErrorCode( + "LeaseAlreadyPresent", + 409, + "There is already a lease present." 
+ ), + LeaseIsBreakingAndCannotBeChanged: new ErrorCode( + "LeaseIsBreakingAndCannotBeChanged", + 409, + "The lease ID matched, but the lease is currently in breaking state and cannot be changed." + ), + LeaseIsBreakingAndCannotBeAcquired: new ErrorCode( + "LeaseIsBreakingAndCannotBeAcquired", + 409, + "The lease ID matched, but the lease is currently in breaking state and cannot be acquired until it is broken." + ), + LeaseIsBrokenAndCannotBeRenewed: new ErrorCode( + "LeaseIsBrokenAndCannotBeRenewed", + 409, + "The lease ID matched, but the lease has been broken explicitly and cannot be renewed." + ), + LeaseNotPresentWithContainerOperation: new ErrorCode( + "LeaseNotPresentWithContainerOperation", + 412, + "There is currently no lease on the container." + ), + LeaseNotPresentWithBlobOperation: new ErrorCode( + "LeaseNotPresentWithBlobOperation", + 412, + "There is currently no lease on the blob." + ), + LeaseIdMissing: new ErrorCode( + "LeaseIdMissing", + 412, + "There is currently a lease on the blob/container and no lease ID was specified in the request." + ), + LeaseIdMismatchWithContainerOperation: new ErrorCode( + "LeaseIdMismatchWithContainerOperation", + 412, + "The lease ID specified did not match the lease ID for the container." + ), + LeaseIdMismatchWithBlobOperation: new ErrorCode( + "LeaseIdMismatchWithBlobOperation", + 412, + "The lease ID specified did not match the lease ID for the blob." + ), + ConditionNotMetWrite: new ErrorCode( + "ConditionNotMet", + 412, + "The condition specified in the conditional header(s) was not met for a write operation." + ), + ConditionNotMetRead: new ErrorCode( + "ConditionNotMet", + 304, + "The condition specified in the conditional header(s) was not met for a read operation." + ), + MaxBlobSizeConditionNotMet: new ErrorCode( + "MaxBlobSizeConditionNotMet", + 412, + "The max blob size condition specified was not met." 
+ ), + AppendPositionConditionNotMet: new ErrorCode( + "AppendPositionConditionNotMet", + 412, + "The append position condition specified was not met." + ), + InvalidRange: new ErrorCode( + "InvalidRange", + 416, + "The range specified is invalid for the current size of the resource." + ), + InternalError: new ErrorCode( + "InternalError", + 500, + "The server encountered an internal error. Please retry the request." + ), + PendingCopyOperation: new ErrorCode( + "PendingCopyOperation", + 409, + "There is currently a pending copy operation." + ), + NoPendingCopyOperation: new ErrorCode( + "NoPendingCopyOperation", + 409, + "There is currently no pending copy operation." + ), + InvalidBlockList: new ErrorCode( + "InvalidBlockList", + 400, + "The specified block list is invalid." + ), + InvalidResourceName: new ErrorCode( + "InvalidResourceName", + 400, + "The specifed resource name contains invalid characters." + ), - // QUEUE - OutOfRangeInput: new ErrorCode('OutOfRangeInput', 400, 'One of the request inputs is out of range.'), - QueueAlreadyExists: new ErrorCode('QueueAlreadyExists', 409, 'The specified queue already exists.'), - QueueNotFound: new ErrorCode('QueueNotFound', 404, 'The specified queue does not exist.'), - MessageTooLarge: new ErrorCode('MessageTooLarge', 400, 'The message exceeds the maximum allowed size.'), - MessageNotFound: new ErrorCode('MessageNotFound', 404, 'The specified message does not exist.'), - PopReceiptMismatch: new ErrorCode('PopReceiptMismatch', 400, 'The specified pop receipt did not match the pop receipt for a dequeued message.'), + // QUEUE + OutOfRangeInput: new ErrorCode( + "OutOfRangeInput", + 400, + "One of the request inputs is out of range." + ), + QueueAlreadyExists: new ErrorCode( + "QueueAlreadyExists", + 409, + "The specified queue already exists." + ), + QueueNotFound: new ErrorCode( + "QueueNotFound", + 404, + "The specified queue does not exist." 
+ ), + MessageTooLarge: new ErrorCode( + "MessageTooLarge", + 400, + "The message exceeds the maximum allowed size." + ), + MessageNotFound: new ErrorCode( + "MessageNotFound", + 404, + "The specified message does not exist." + ), + PopReceiptMismatch: new ErrorCode( + "PopReceiptMismatch", + 400, + "The specified pop receipt did not match the pop receipt for a dequeued message." + ), - // TABLE - AtomXmlNotSupported: new ErrorCode('Atom+XmlNotSupported', 501, 'Atom feed is currently not supported by Azurite.'), - TableAlreadyExists: new ErrorCode('TableAlreadyExists', 409, 'The table specified already exists.'), - TableNotFound: new ErrorCode('TableNotFound', 404, 'The table specified does not exist.'), - EntityAlreadyExists: new ErrorCode('EntityAlreadyExists', 409, 'The specified entity already exists.'), - // See https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-the-table-service-data-model - ReservedTableName: new ErrorCode('BadRequest', 404, 'The table name is reserved.'), - UpdateConditionNotSatisfied: new ErrorCode('UpdateConditionNotSatisfied', 412, 'The update condition specified in the request was not satisfied.') -} \ No newline at end of file + // TABLE see: https://docs.microsoft.com/en-us/rest/api/storageservices/table-service-error-codes + AtomXmlNotSupported: new ErrorCode( + "Atom+XmlNotSupported", + 501, + "Atom feed is currently not supported by Azurite." + ), + // See https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-the-table-service-data-model + ReservedTableName: new ErrorCode( + "BadRequest", + 404, + "The table name is reserved." + ), + DuplicatePropertiesSpecified: new ErrorCode( + "DuplicatePropertiesSpecified", + 400, + "A property is specified more than one time." + ), + EntityNotFound: new ErrorCode( + "EntityNotFound", + 404, + "The specified entity does not exist." + ), + EntityAlreadyExists: new ErrorCode( + "EntityAlreadyExists", + 409, + "The specified entity already exists." 
+ ), + EntityTooLarge: new ErrorCode( + "EntityTooLarge", + 400, + "The entity is larger than the maximum size permitted." + ), + HostInformationNotPresent: new ErrorCode( + "HostInformationNotPresent", + 400, + "The required host information is not present in the request. You must send a non-empty Host header or include the absolute URI in the request line." + ), + InvalidValueType: new ErrorCode( + "InvalidValueType", + 400, + "The value specified is invalid." + ), + JsonFormatNotSupported: new ErrorCode( + "JsonFormatNotSupported", + 415, + "JSON format is not supported." + ), + MethodNotAllowed: new ErrorCode( + "MethodNotAllowed", + 405, + "The requested method is not allowed on the specified resource." + ), + NotImplemented: new ErrorCode( + "NotImplemented", + 501, + "The requested operation is not implemented on the specified resource." + ), + PropertiesNeedValue: new ErrorCode( + "PropertiesNeedValue", + 400, + "Values have not been specified for all properties in the entity." + ), + PropertyNameInvalid: new ErrorCode( + "PropertyNameInvalid", + 400, + "The property name is invalid." + ), + PropertyNameTooLong: new ErrorCode( + "PropertyNameTooLong", + 400, + "The property name exceeds the maximum allowed length." + ), + PropertyValueTooLarge: new ErrorCode( + "PropertyValueTooLarge", + 400, + "The property value is larger than the maximum size permitted." + ), + TableAlreadyExists: new ErrorCode( + "TableAlreadyExists", + 409, + "The table specified already exists." + ), + TableBeingDeleted: new ErrorCode( + "TableBeingDeleted", + 409, + "The specified table is being deleted." + ), + TableNotFound: new ErrorCode( + "TableNotFound", + 404, + "The table specified does not exist." + ), + TooManyProperties: new ErrorCode( + "TooManyProperties", + 400, + "The entity contains more properties than allowed." 
+ ), + UpdateConditionNotSatisfied: new ErrorCode( + "UpdateConditionNotSatisfied", + 412, + "The update condition specified in the request was not satisfied." + ), + XMethodIncorrectCount: new ErrorCode( + "XMethodIncorrectCount", + 400, + "More than one X-HTTP-Method is specified." + ), + XMethodIncorrectValue: new ErrorCode( + "XMethodIncorrectValue", + 400, + "The specified X-HTTP-Method is invalid." + ), + XMethodNotUsingPost: new ErrorCode( + "XMethodNotUsingPost", + 400, + "The request uses X-HTTP-Method with an HTTP verb other than POST." + ), +}; diff --git a/lib/core/HttpHeaderNames.js b/lib/core/HttpHeaderNames.js index badc25e1f..cc4885e7a 100644 --- a/lib/core/HttpHeaderNames.js +++ b/lib/core/HttpHeaderNames.js @@ -1,83 +1,85 @@ -'use strict'; +/** @format */ + +"use strict"; module.exports = { - ETAG: 'etag', - LAST_MODIFIED: 'last-modified', - CONTENT_LENGTH: 'content-length', - CONTENT_TYPE: 'content-type', - CONTENT_ENCODING: 'content-encoding', - CONTENT_DISPOSITION: 'content-disposition', - CACHE_CONTROL: 'cache-control', - CONTENT_LANGUAGE: 'content-language', - CONTENT_MD5: 'content-md5', - BLOB_CONTENT_MD5: 'x-ms-blob-content-md5', - RANGE: 'range', - RANGE_GET_CONTENT_MD5: 'x-ms-range-get-content-md5', - CONTENT_RANGE: 'content-range', - ACCEPT_RANGES: 'accept-ranges', - DELETE_SNAPSHOTS: 'x-ms-delete-snapshots', - SNAPSHOT_DATE: 'x-ms-snapshot', - LEASE_ID: 'x-ms-lease-id', - LEASE_ACTION: 'x-ms-lease-action', - LEASE_DURATION: 'x-ms-lease-duration', - LEASE_BREAK_PERIOD: 'x-ms-lease-break-period', - LEASE_TIME: 'x-ms-lease-time', - PROPOSED_LEASE_ID: 'x-ms-proposed-lease-id', - IF_MODFIFIED_SINCE: 'if-modified-since', - IF_UNMODIFIED_SINCE: 'if-unmodified-since', - IF_MATCH: 'if-match', - IF_NONE_MATCH: 'if_none_match', - SOURCE_IF_MODFIFIED_SINCE: 'x-ms-source-if-modified-since', - SOURCE_IF_UNMODIFIED_SINCE: 'x-ms-source-if-unmodified-since', - SOURCE_IF_MATCH: 'x-ms-source-if-match', - SOURCE_IF_NONE_MATCH: 'x-ms-source-if_none_match', 
- BLOB_PUBLIC_ACCESS: 'x-ms-blob-public-access', - BLOB_TYPE: 'x-ms-blob-type', - REQUEST_SERVER_ENCRYPTED: 'x-ms-request-server-encrypted', - LEASE_STATUS: 'x-ms-lease-status', - LEASE_STATE: 'x-ms-lease-state', - LEASE_DURATION: 'x-ms-lease-duration', - COPY_SOURCE: 'x-ms-copy-source', - COPY_COMPLETION_TIME: 'x-ms-copy-completion-time', - COPY_STATUS: 'x-ms-copy-status', - COPY_ID: 'x-ms-copy-id', - COPY_STATUS_DESCRIPTION: 'x-ms-copy-status-description', - COPY_PROGRESS: 'x-ms-copy-progress', - INCREMENTAL_COPY: 'x-ms-incremental-copy', - COPY_DESTINATION_SNAPSHOT: 'x-ms-copy-destination-snapshot', - ACCESS_CONTROL_REQUEST_METHOD: 'access-control-request-method', - ACCESS_CONTROL_REQUEST_HEADERS: 'access-control-request-headers', - ACCESS_CONTROL_ALLOW_ORIGIN: 'access-control-allow-origin', - ACCESS_CONTROL_ALLOW_METHODS: 'access-control-allow-methods', - ACCESS_CONTROL_ALLOW_HEADERS: 'access-control-allow-headers', - ACCESS_CONTROL_MAX_AGE: 'access-control-max-age', - ACCESS_CONTROL_ALLOW_CREDENTIALS: 'access-control-allow-credentials', - ACCESS_CONTROL_EXPOSE_HEADERS: 'access-control-expose-headers', - // Append Blob specific attributes - BLOB_CONDITION_MAX_SIZE: 'x-ms-blob-condition-maxsize', - BLOB_COMMITTED_BLOCK_COUNT: 'x-ms-blob-committed-block-count', - BLOB_CONDITION_APPENDPOS: 'x-ms-blob-condition-appendpos', - BLOB_APPEND_OFFSET: 'x-ms-blob-append-offset', - // Page Blob specific - SEQUENCE_NUMBER: 'x-ms-blob-sequence-number', - BLOB_CONTENT_LENGTH: 'x-ms-blob-content-length', - PAGE_WRITE: 'x-ms-page-write', - IF_SEQUENCE_NUMBER_LE: 'x-ms-if-sequence-number-le', - IF_SEQUENCE_NUMBER_LT: 'x-ms-if-sequence-number-lt', - IF_SEQUENCE_NUMBER_EQ: 'x-ms-if-sequence-number-eq', + ETAG: "etag", + LAST_MODIFIED: "last-modified", + CONTENT_LENGTH: "content-length", + CONTENT_TYPE: "content-type", + CONTENT_ENCODING: "content-encoding", + CONTENT_DISPOSITION: "content-disposition", + CACHE_CONTROL: "cache-control", + CONTENT_LANGUAGE: "content-language", + 
CONTENT_MD5: "content-md5", + BLOB_CONTENT_MD5: "x-ms-blob-content-md5", + RANGE: "range", + RANGE_GET_CONTENT_MD5: "x-ms-range-get-content-md5", + CONTENT_RANGE: "content-range", + ACCEPT_RANGES: "accept-ranges", + DELETE_SNAPSHOTS: "x-ms-delete-snapshots", + SNAPSHOT_DATE: "x-ms-snapshot", + LEASE_ID: "x-ms-lease-id", + LEASE_ACTION: "x-ms-lease-action", + LEASE_DURATION: "x-ms-lease-duration", + LEASE_BREAK_PERIOD: "x-ms-lease-break-period", + LEASE_TIME: "x-ms-lease-time", + PROPOSED_LEASE_ID: "x-ms-proposed-lease-id", + IF_MODFIFIED_SINCE: "if-modified-since", + IF_UNMODIFIED_SINCE: "if-unmodified-since", + IF_MATCH: "if-match", + IF_NONE_MATCH: "if_none_match", + SOURCE_IF_MODFIFIED_SINCE: "x-ms-source-if-modified-since", + SOURCE_IF_UNMODIFIED_SINCE: "x-ms-source-if-unmodified-since", + SOURCE_IF_MATCH: "x-ms-source-if-match", + SOURCE_IF_NONE_MATCH: "x-ms-source-if_none_match", + BLOB_PUBLIC_ACCESS: "x-ms-blob-public-access", + BLOB_TYPE: "x-ms-blob-type", + REQUEST_SERVER_ENCRYPTED: "x-ms-request-server-encrypted", + LEASE_STATUS: "x-ms-lease-status", + LEASE_STATE: "x-ms-lease-state", + LEASE_DURATION: "x-ms-lease-duration", + COPY_SOURCE: "x-ms-copy-source", + COPY_COMPLETION_TIME: "x-ms-copy-completion-time", + COPY_STATUS: "x-ms-copy-status", + COPY_ID: "x-ms-copy-id", + COPY_STATUS_DESCRIPTION: "x-ms-copy-status-description", + COPY_PROGRESS: "x-ms-copy-progress", + INCREMENTAL_COPY: "x-ms-incremental-copy", + COPY_DESTINATION_SNAPSHOT: "x-ms-copy-destination-snapshot", + ACCESS_CONTROL_REQUEST_METHOD: "access-control-request-method", + ACCESS_CONTROL_REQUEST_HEADERS: "access-control-request-headers", + ACCESS_CONTROL_ALLOW_ORIGIN: "access-control-allow-origin", + ACCESS_CONTROL_ALLOW_METHODS: "access-control-allow-methods", + ACCESS_CONTROL_ALLOW_HEADERS: "access-control-allow-headers", + ACCESS_CONTROL_MAX_AGE: "access-control-max-age", + ACCESS_CONTROL_ALLOW_CREDENTIALS: "access-control-allow-credentials", + ACCESS_CONTROL_EXPOSE_HEADERS: 
"access-control-expose-headers", + // Append Blob specific attributes + BLOB_CONDITION_MAX_SIZE: "x-ms-blob-condition-maxsize", + BLOB_COMMITTED_BLOCK_COUNT: "x-ms-blob-committed-block-count", + BLOB_CONDITION_APPENDPOS: "x-ms-blob-condition-appendpos", + BLOB_APPEND_OFFSET: "x-ms-blob-append-offset", + // Page Blob specific + SEQUENCE_NUMBER: "x-ms-blob-sequence-number", + BLOB_CONTENT_LENGTH: "x-ms-blob-content-length", + PAGE_WRITE: "x-ms-page-write", + IF_SEQUENCE_NUMBER_LE: "x-ms-if-sequence-number-le", + IF_SEQUENCE_NUMBER_LT: "x-ms-if-sequence-number-lt", + IF_SEQUENCE_NUMBER_EQ: "x-ms-if-sequence-number-eq", - // Queue specific - POP_RECEIPT: 'x-ms-popreceipt', - VISIBLE_NEXT_TIME: 'x-ms-time-next-visible', - APPROXIMATE_MESSAGES_COUNT: 'x-ms-approximate-messages-count', + // Queue specific + POP_RECEIPT: "x-ms-popreceipt", + VISIBLE_NEXT_TIME: "x-ms-time-next-visible", + APPROXIMATE_MESSAGES_COUNT: "x-ms-approximate-messages-count", - // Table specific - PREFERENCE_APPLIED: 'preference-applied', - PREFER: 'Prefer', - ACCEPT: 'accept', + // Table specific + PREFERENCE_APPLIED: "preference-applied", + PREFER: "prefer", + ACCEPT: "accept", - VERSION: 'x-ms-version', - DATE: 'date', - REQUEST_ID: 'x-ms-request-id', - ORIGIN: 'origin' -}; \ No newline at end of file + VERSION: "x-ms-version", + DATE: "date", + REQUEST_ID: "x-ms-request-id", + ORIGIN: "origin", +}; diff --git a/lib/core/InternalAzuriteError.js b/lib/core/InternalAzuriteError.js index c4cd5d3d5..ea755b8bf 100644 --- a/lib/core/InternalAzuriteError.js +++ b/lib/core/InternalAzuriteError.js @@ -1,15 +1,17 @@ -'use strict'; +/** @format */ + +"use strict"; function _generateErrorMessage(msg) { - return `*Internal Azurite Error*: ${msg}`; + return `*Internal Azurite Error*: ${msg}`; } class InternalAzuriteError extends Error { - constructor(msg) { - super(msg); - this.message = _generateErrorMessage(msg); - Error.captureStackTrace(this, this.constructor); - } + constructor(msg) { + super(msg); + 
this.message = _generateErrorMessage(msg); + Error.captureStackTrace(this, this.constructor); + } } -module.exports = InternalAzuriteError; \ No newline at end of file +module.exports = InternalAzuriteError; diff --git a/lib/core/blob/CopyOperationsManager.js b/lib/core/blob/CopyOperationsManager.js index 64f23db29..145ff4d2e 100644 --- a/lib/core/blob/CopyOperationsManager.js +++ b/lib/core/blob/CopyOperationsManager.js @@ -1,36 +1,42 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - fs = require('fs'); +"use strict"; + +const BbPromise = require("bluebird"), + fs = require("fs"); class CopyOperationsManager { - constructor() { - this.ops = {}; - } - - add(copyId, readStream, writeStream, toFilename) { - this.ops[copyId] = { readStream: readStream, writeStream: writeStream, toFilename: toFilename }; - } - - cancel(copyId) { - return new BbPromise((resolve, reject) => { - this.ops[copyId].writeStream.on('unpipe', () => { - fs.unlink(this.ops[copyId].toFilename, (err) => { - this.clear(copyId); - err ? reject(err) : resolve(); - }); - }); - this.ops[copyId].readStream.unpipe(writeStream); + constructor() { + this.ops = {}; + } + + add(copyId, readStream, writeStream, toFilename) { + this.ops[copyId] = { + readStream: readStream, + writeStream: writeStream, + toFilename: toFilename, + }; + } + + cancel(copyId) { + return new BbPromise((resolve, reject) => { + this.ops[copyId].writeStream.on("unpipe", () => { + fs.unlink(this.ops[copyId].toFilename, (err) => { + this.clear(copyId); + err ? 
reject(err) : resolve(); }); - } + }); + this.ops[copyId].readStream.unpipe(writeStream); + }); + } - clear(copyId) { - delete this.ops[copyId]; - } + clear(copyId) { + delete this.ops[copyId]; + } - isPending(copyId) { - return this.ops[copyId] !== undefined; - } + isPending(copyId) { + return this.ops[copyId] !== undefined; + } } -module.exports = new CopyOperationsManager(); \ No newline at end of file +module.exports = new CopyOperationsManager(); diff --git a/lib/core/blob/SnapshotTimeManager.js b/lib/core/blob/SnapshotTimeManager.js index aed85804d..addc17343 100644 --- a/lib/core/blob/SnapshotTimeManager.js +++ b/lib/core/blob/SnapshotTimeManager.js @@ -1,54 +1,56 @@ -'use strict'; +/** @format */ + +"use strict"; /** * Keeps track of the latest snapshot time for each blob in every container. - * This is needed since time resolution in most SDKs work on a second level. In particular, + * This is needed since time resolution in most SDKs work on a second level. In particular, * during unit-tests chances are high that subsequent snapshots of a blob collide time-wise since - * they only differ at the milliseconds level which is unlikely in a prod setting when communicating with + * they only differ at the milliseconds level which is unlikely in a prod setting when communicating with * Azure Blob Storage over network. - * + * * SnapshotTimeManager provides means to avoid such conflicts by returning a timestamp that is at least * one second greater than the last snapshot time of a particular blob. - * + * * @class SnapshotTimeManager */ class SnapshotTimeManager { - constructor() { - this.times = {}; - } + constructor() { + this.times = {}; + } - /** - * Updates / Adds the date of the latest snapshot of a particular blob. - * - * @param {String} id of the blob - * @param {Date} date - * - * @memberof SnapshotTimeManager - */ - _update(id, date) { - this.times[id] = date; - } + /** + * Updates / Adds the date of the latest snapshot of a particular blob. 
+ * + * @param {String} id of the blob + * @param {Date} date + * + * @memberof SnapshotTimeManager + */ + _update(id, date) { + this.times[id] = date; + } - /** - * Returns a timestamp (UTC String) that is at least one second greater than the - * last snapshot time of a particular blob. - * - * @param {String} id of the blob - * @param {Date} now reference time for the snapshot to be taken - * - * @memberof SnapshotTimeManager - */ - getDate(id, now) { - const date = this.times[id]; - if (date === undefined || (now.getTime() - date.getTime()) > 1000) { - this._update(id, now); - return now; - } - const updatedDate = new Date(date); - updatedDate.setSeconds(date.getSeconds() + 1); - this._update(id, updatedDate); - return updatedDate; + /** + * Returns a timestamp (UTC String) that is at least one second greater than the + * last snapshot time of a particular blob. + * + * @param {String} id of the blob + * @param {Date} now reference time for the snapshot to be taken + * + * @memberof SnapshotTimeManager + */ + getDate(id, now) { + const date = this.times[id]; + if (date === undefined || now.getTime() - date.getTime() > 1000) { + this._update(id, now); + return now; } + const updatedDate = new Date(date); + updatedDate.setSeconds(date.getSeconds() + 1); + this._update(id, updatedDate); + return updatedDate; + } } -module.exports = new SnapshotTimeManager(); \ No newline at end of file +module.exports = new SnapshotTimeManager(); diff --git a/lib/core/blob/StorageManager.js b/lib/core/blob/StorageManager.js index a7fdc9bfc..e9ea38819 100644 --- a/lib/core/blob/StorageManager.js +++ b/lib/core/blob/StorageManager.js @@ -1,828 +1,1096 @@ -'use strict'; - -const env = require('./../env'), - utils = require('./../utils'), - path = require('path'), - BbPromise = require('bluebird'), - Loki = require('lokijs'), - req = require('request'), - fs = require("fs-extra"), - fsn = BbPromise.promisifyAll(require("fs")), - crypto = require('crypto'), - StorageTables = 
require('./../Constants').StorageTables, - StorageEntityType = require('./../Constants').StorageEntityType, - LeaseActions = require('./../Constants').LeaseActions, - LeaseStatus = require('./../Constants').LeaseStatus, - CopyStatus = require('./../Constants').CopyStatus, - BlockListType = require('./../Constants').BlockListType, - StorageEntityGenerator = require('./../../model/blob/StorageEntityGenerator'), - AzuriteBlobRequest = require('./../../model/blob/AzuriteBlobRequest'), - CombinedStream = require('combined-stream'), - ContainerProxy = require('./../../model/blob/ContainerProxy'), - BlobProxy = require('./../../model/blob/BlobProxy'), - N = require('./../../core/HttpHeaderNames'), - ContainerRequest = require('./../../model/blob/AzuriteContainerRequest'), - AzuriteResponse = require('./../../model/blob/AzuriteResponse'), - BlobRequest = require('./../../model/blob/AzuriteBlobRequest'), - SnapshotTimeManager = require('./SnapshotTimeManager'), - CopyOperationsManager = require('./CopyOperationsManager'), - uuidv4 = require('uuid/v4'); +/** @format */ -class StorageManager { - constructor() { - } +"use strict"; - init() { - this.db = BbPromise.promisifyAll(new Loki(env.azuriteDBPathBlob, { autosave: true, autosaveInterval: 5000 })); - return fsn.statAsync(env.azuriteDBPathBlob) - .then((stat) => { - return this.db.loadDatabaseAsync({}); - }) - .then((data) => { - if (!this.db.getCollection(StorageTables.Containers)) { - this.db.addCollection(StorageTables.Containers); - } - if (!this.db.getCollection(StorageTables.ServiceProperties)) { - this.db.addCollection(StorageTables.ServiceProperties); - } - return this.db.saveDatabaseAsync(); - }) - .catch((e) => { - if (e.code === 'ENOENT') { - // No DB has been persisted / initialized yet. 
- this.db.addCollection(StorageTables.Containers); - this.db.addCollection(StorageTables.ServiceProperties); - // See https://github.com/arafato/azurite/issues/155: - // Azure Storage Explorer expects an existing $logs folder at initial start. - const logsStub = { - metaProps: {}, - entityType: StorageEntityType.Container, - containerName: '$logs', - httpProps: {} - }; - logsStub.httpProps[N.BLOB_PUBLIC_ACCESS] = 'private'; - return this.createContainer(logsStub) - .then(() => { - return this.db.saveDatabaseAsync(); - }); - } - // This should never happen! - console.error(`Failed to initialize database at "${env.azuriteDBPathBlob}"`); - throw e; - }); - } +const env = require("./../env"), + utils = require("./../utils"), + path = require("path"), + BbPromise = require("bluebird"), + Loki = require("lokijs"), + req = require("request"), + fs = require("fs-extra"), + fsn = BbPromise.promisifyAll(require("fs")), + crypto = require("crypto"), + StorageTables = require("./../Constants").StorageTables, + StorageEntityType = require("./../Constants").StorageEntityType, + LeaseActions = require("./../Constants").LeaseActions, + LeaseStatus = require("./../Constants").LeaseStatus, + CopyStatus = require("./../Constants").CopyStatus, + BlockListType = require("./../Constants").BlockListType, + StorageEntityGenerator = require("./../../model/blob/StorageEntityGenerator"), + AzuriteBlobRequest = require("./../../model/blob/AzuriteBlobRequest"), + CombinedStream = require("combined-stream"), + ContainerProxy = require("./../../model/blob/ContainerProxy"), + BlobProxy = require("./../../model/blob/BlobProxy"), + N = require("./../../core/HttpHeaderNames"), + ContainerRequest = require("./../../model/blob/AzuriteContainerRequest"), + AzuriteResponse = require("./../../model/blob/AzuriteResponse"), + BlobRequest = require("./../../model/blob/AzuriteBlobRequest"), + SnapshotTimeManager = require("./SnapshotTimeManager"), + CopyOperationsManager = require("./CopyOperationsManager"), 
+ uuidv4 = require("uuid/v4"); - flush() { +class StorageManager { + constructor() {} + + init() { + this.db = BbPromise.promisifyAll( + new Loki(env.azuriteDBPathBlob, { + autosave: true, + autosaveInterval: 5000, + }) + ); + return fsn + .statAsync(env.azuriteDBPathBlob) + .then((stat) => { + return this.db.loadDatabaseAsync({}); + }) + .then((data) => { + if (!this.db.getCollection(StorageTables.Containers)) { + this.db.addCollection(StorageTables.Containers); + } + if (!this.db.getCollection(StorageTables.ServiceProperties)) { + this.db.addCollection(StorageTables.ServiceProperties); + } return this.db.saveDatabaseAsync(); - } + }) + .catch((e) => { + if (e.code === "ENOENT") { + // No DB has been persisted / initialized yet. + this.db.addCollection(StorageTables.Containers); + this.db.addCollection(StorageTables.ServiceProperties); + // See https://github.com/arafato/azurite/issues/155: + // Azure Storage Explorer expects an existing $logs folder at initial start. + const logsStub = { + metaProps: {}, + entityType: StorageEntityType.Container, + containerName: "$logs", + httpProps: {}, + }; + logsStub.httpProps[N.BLOB_PUBLIC_ACCESS] = "private"; + return this.createContainer(logsStub).then(() => { + return this.db.saveDatabaseAsync(); + }); + } + // This should never happen! 
+ console.error( + `Failed to initialize database at "${env.azuriteDBPathBlob}"` + ); + throw e; + }); + } - close(){ - return this.db.close(); - } + flush() { + return this.db.saveDatabaseAsync(); + } - createContainer(request) { - const coll = this.db.getCollection(StorageTables.Containers); - const entity = StorageEntityGenerator.generateStorageEntity(request); - const containerProxy = new ContainerProxy(coll.insert(entity)); - this.db.addCollection(entity.name); - return BbPromise.resolve(new AzuriteResponse({ proxy: containerProxy, cors: request.cors })); - } + close() { + return this.db.close(); + } - deleteContainer(request) { - const conColl = this.db.getCollection(StorageTables.Containers); - conColl.chain().find({ 'name': { '$eq': request.containerName } }).remove(); - const entities = this.db.getCollection(request.containerName).chain() - .find({ 'name': { '$contains': '' } }).data(); // get every entity in this collection - const promises = []; + createContainer(request) { + const coll = this.db.getCollection(StorageTables.Containers); + const entity = StorageEntityGenerator.generateStorageEntity(request); + const containerProxy = new ContainerProxy(coll.insert(entity)); + this.db.addCollection(entity.name); + return BbPromise.resolve( + new AzuriteResponse({ proxy: containerProxy, cors: request.cors }) + ); + } - for (const entity of entities) { - promises.push(fs.remove(entity.uri)); - } - return BbPromise.all(promises) - .then(() => { - this.db.removeCollection(request.containerName); - return new AzuriteResponse({ cors: request.cors }); - }); - } + deleteContainer(request) { + const conColl = this.db.getCollection(StorageTables.Containers); + conColl + .chain() + .find({ name: { $eq: request.containerName } }) + .remove(); + const entities = this.db + .getCollection(request.containerName) + .chain() + .find({ name: { $contains: "" } }) + .data(); // get every entity in this collection + const promises = []; - listContainer(request, prefix, 
maxresults) { - maxresults = parseInt(maxresults); - let tables = this.db.getCollection(StorageTables.Containers); - let result = tables.chain() - .find({ 'name': { '$regex': `^${prefix}` } }) - .simplesort('name') - .limit(maxresults) - .data(); - return BbPromise.resolve(new AzuriteResponse({ payload: result, cors: request.cors })); + for (const entity of entities) { + promises.push(fs.remove(entity.uri)); } + return BbPromise.all(promises).then(() => { + this.db.removeCollection(request.containerName); + return new AzuriteResponse({ cors: request.cors }); + }); + } - putBlob(request) { - const coll = this.db.getCollection(request.containerName), - blobProxy = this._createOrUpdateBlob(coll, request); - this._clearCopyMetaData(blobProxy); - return fs.outputFile(request.uri, request.body, { encoding: request.httpProps[N.CONTENT_ENCODING] }) - .then(() => { - return new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - }); - } + listContainer(request, prefix, maxresults) { + maxresults = parseInt(maxresults); + let tables = this.db.getCollection(StorageTables.Containers); + let result = tables + .chain() + .find({ name: { $regex: `^${prefix}` } }) + .simplesort("name") + .limit(maxresults) + .data(); + return BbPromise.resolve( + new AzuriteResponse({ payload: result, cors: request.cors }) + ); + } - putAppendBlock(request) { - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - blobProxy.original[N.BLOB_COMMITTED_BLOCK_COUNT] += 1; - blobProxy.original.size += request.body.length; - coll.update(blobProxy.release()); - return fsn.appendFileAsync(request.uri, request.body, { encoding: request.httpProps[N.CONTENT_ENCODING] }) - .then(() => { - return new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - }); - } + putBlob(request) { + const coll = this.db.getCollection(request.containerName), + blobProxy = this._createOrUpdateBlob(coll, request); + this._clearCopyMetaData(blobProxy); + return fs + 
.outputFile(request.uri, request.body, { + encoding: request.httpProps[N.CONTENT_ENCODING], + }) + .then(() => { + return new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); + }); + } - deleteBlob(request) { - const coll = this.db.getCollection(request.containerName), - snapshoteDeleteQueryParam = request.httpProps[N.DELETE_SNAPSHOTS]; - let promises = []; + putAppendBlock(request) { + const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + blobProxy.original[N.BLOB_COMMITTED_BLOCK_COUNT] += 1; + blobProxy.original.size += request.body.length; + coll.update(blobProxy.release()); + return fsn + .appendFileAsync(request.uri, request.body, { + encoding: request.httpProps[N.CONTENT_ENCODING], + }) + .then(() => { + return new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); + }); + } - if (snapshoteDeleteQueryParam === 'include' || snapshoteDeleteQueryParam === 'only') { - const result = coll.chain().find({ 'originId': { '$eq': request.id } }); - for (const entity of result.data()) { - promises.push(fs.remove(entity.uri)); - } - result.remove(); + deleteBlob(request) { + const coll = this.db.getCollection(request.containerName), + snapshoteDeleteQueryParam = request.httpProps[N.DELETE_SNAPSHOTS]; + let promises = []; - if (snapshoteDeleteQueryParam === 'include') { - coll.chain().find({ 'id': { '$eq': request.id } }).remove(); - promises.push(fs.remove(request.uri)); - } - return BbPromise.all(promises) - .then(() => { - return new AzuriteResponse({ cors: request.cors }); - }); - } else { - coll.chain().find({ 'id': { '$eq': request.id } }).remove(); - coll.chain().find({ 'parentId': { '$eq': request.id } }).remove(); // Removing (un-)committed blocks - return fs.remove(request.uri) - .then(() => { - return new AzuriteResponse({ cors: request.cors }); - }); - } + if ( + snapshoteDeleteQueryParam === "include" || + snapshoteDeleteQueryParam === "only" + ) { + const result = coll.chain().find({ originId: { 
$eq: request.id } }); + for (const entity of result.data()) { + promises.push(fs.remove(entity.uri)); + } + result.remove(); + + if (snapshoteDeleteQueryParam === "include") { + coll + .chain() + .find({ id: { $eq: request.id } }) + .remove(); + promises.push(fs.remove(request.uri)); + } + return BbPromise.all(promises).then(() => { + return new AzuriteResponse({ cors: request.cors }); + }); + } else { + coll + .chain() + .find({ id: { $eq: request.id } }) + .remove(); + coll + .chain() + .find({ parentId: { $eq: request.id } }) + .remove(); // Removing (un-)committed blocks + return fs.remove(request.uri).then(() => { + return new AzuriteResponse({ cors: request.cors }); + }); } + } - getBlob(request) { - const coll = this.db.getCollection(request.containerName); - const blob = coll.chain() - .find({ 'id': { '$eq': request.id } }) - .data()[0]; + getBlob(request) { + const coll = this.db.getCollection(request.containerName); + const blob = coll + .chain() + .find({ id: { $eq: request.id } }) + .data()[0]; - const response = new AzuriteResponse({ proxy: new BlobProxy(blob, request.containerName), cors: request.cors }); - return BbPromise.resolve(response); - } + const response = new AzuriteResponse({ + proxy: new BlobProxy(blob, request.containerName), + cors: request.cors, + }); + return BbPromise.resolve(response); + } - listBlobs(request, query) { - const condition = []; - if (query.prefix !== '') { - condition.push({ - 'name': { '$regex': `^${query.prefix}` } - }); - } - condition.push({ - 'parentId': { '$eq': undefined } // blocks should never be part of the listing - }); - const includeParams = query.include ? 
query.include.split(',') : []; - if (!includeParams.includes('snapshots')) { - condition.push({ - 'snapshot': { '$eq': false } - }); - } - if (!includeParams.includes('uncommittedblobs')) { - condition.push({ - 'committed': { '$eq': true } - }); - } - const coll = this.db.getCollection(request.containerName); - let blobs = coll.chain() - .find({ - '$and': condition - }) - .simplesort('name'); - const totalHits = blobs.count(); - const offset = query.marker !== undefined ? query.marker : 0; - blobs = blobs.offset(offset); - const response = new AzuriteResponse({ payload: BlobProxy.createFromArray(blobs.limit(query.maxresults).data(), request.containerName), cors: request.cors }); - response.nextMarker = (totalHits > query.maxresults + offset) ? (query.maxresults + offset) : 0; - return BbPromise.resolve(response); + listBlobs(request, query) { + const condition = []; + if (query.prefix !== "") { + condition.push({ + name: { $regex: `^${query.prefix}` }, + }); } + condition.push({ + parentId: { $eq: undefined }, // blocks should never be part of the listing + }); + const includeParams = query.include ? query.include.split(",") : []; + if (!includeParams.includes("snapshots")) { + condition.push({ + snapshot: { $eq: false }, + }); + } + if (!includeParams.includes("uncommittedblobs")) { + condition.push({ + committed: { $eq: true }, + }); + } + const coll = this.db.getCollection(request.containerName); + let blobs = coll + .chain() + .find({ + $and: condition, + }) + .simplesort("name"); + const totalHits = blobs.count(); + const offset = query.marker !== undefined ? query.marker : 0; + blobs = blobs.offset(offset); + const response = new AzuriteResponse({ + payload: BlobProxy.createFromArray( + blobs.limit(query.maxresults).data(), + request.containerName + ), + cors: request.cors, + }); + response.nextMarker = + totalHits > query.maxresults + offset ? 
query.maxresults + offset : 0; + return BbPromise.resolve(response); + } - putBlock(request) { - // We only create the parent blob in DB if it does not already exists. - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.parentId); - if (blobProxy === undefined) { - // If blockId is set we would generate a commit storage entity, thus we - // clone the original request and set blockId to undefined - const parentBlobRequest = AzuriteBlobRequest.clone(request); - parentBlobRequest.id = parentBlobRequest.parentId; - parentBlobRequest.uri = env.diskStorageUri(parentBlobRequest.id); - delete parentBlobRequest.parentId; - delete parentBlobRequest.blockId; - parentBlobRequest.commit = false; - parentBlobRequest.body = undefined; - this._createOrUpdateBlob(coll, parentBlobRequest); - } - // Storing block information in DB. - const blockProxy = this._createOrUpdateBlob(coll, request); - // Make sure that the parent blob exists on storage. - return fs.ensureFile(request.parentUri) - .then(() => { - return fs.outputFile(request.uri, request.body, { encoding: request.httpProps[N.CONTENT_ENCODING] }); - }) - .then(() => { - return new AzuriteResponse({ proxy: blockProxy, cors: request.cors }); - }); + putBlock(request) { + // We only create the parent blob in DB if it does not already exists. 
+ const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.parentId + ); + if (blobProxy === undefined) { + // If blockId is set we would generate a commit storage entity, thus we + // clone the original request and set blockId to undefined + const parentBlobRequest = AzuriteBlobRequest.clone(request); + parentBlobRequest.id = parentBlobRequest.parentId; + parentBlobRequest.uri = env.diskStorageUri(parentBlobRequest.id); + delete parentBlobRequest.parentId; + delete parentBlobRequest.blockId; + parentBlobRequest.commit = false; + parentBlobRequest.body = undefined; + this._createOrUpdateBlob(coll, parentBlobRequest); } + // Storing block information in DB. + const blockProxy = this._createOrUpdateBlob(coll, request); + // Make sure that the parent blob exists on storage. + return fs + .ensureFile(request.parentUri) + .then(() => { + return fs.outputFile(request.uri, request.body, { + encoding: request.httpProps[N.CONTENT_ENCODING], + }); + }) + .then(() => { + return new AzuriteResponse({ proxy: blockProxy, cors: request.cors }); + }); + } - putBlockList(request) { - let blockPaths = []; - for (const block of request.payload) { - const blockId = env.blockId(request.containerName, request.blobName, block.id); - blockPaths.push(env.diskStorageUri(blockId)); - } - // Updating properties of blob - const coll = this.db.getCollection(request.containerName); - const blobProxy = this._createOrUpdateBlob(coll, request); - // Writing multiple blocks to one blob - const combinedStream = CombinedStream.create(); - for (const path of blockPaths) { - combinedStream.append(fs.createReadStream(path)); - } - return new BbPromise((resolve, reject) => { - const destinationStream = fs.createWriteStream(request.uri); - destinationStream - .on('error', (e) => { - reject(e); + putBlockList(request) { + let blockPaths = []; + for (const block of request.payload) { + const blockId = env.blockId( + request.containerName, + request.blobName, + block.id + ); 
+ blockPaths.push(env.diskStorageUri(blockId)); + } + // Updating properties of blob + const coll = this.db.getCollection(request.containerName); + const blobProxy = this._createOrUpdateBlob(coll, request); + // Writing multiple blocks to one blob + const combinedStream = CombinedStream.create(); + for (const path of blockPaths) { + combinedStream.append(fs.createReadStream(path)); + } + return new BbPromise((resolve, reject) => { + const destinationStream = fs.createWriteStream(request.uri); + destinationStream + .on("error", (e) => { + reject(e); + }) + .on("finish", () => { + let totalSize = 0; + // Set Blocks in DB to committed = true, delete blocks not in BlockList + const promises = []; + const blocks = coll + .chain() + .find({ parentId: request.id }) + .data(); + for (const block of blocks) { + if ( + request.payload + .map((e) => { + return e.id; }) - .on('finish', () => { - let totalSize = 0; - // Set Blocks in DB to committed = true, delete blocks not in BlockList - const promises = []; - const blocks = coll.chain() - .find({ parentId: request.id }) - .data(); - for (const block of blocks) { - if (request.payload.map((e) => { return e.id }).indexOf(block.blockId) !== -1) { - block.committed = true; - totalSize += block.size; - coll.update(block); - } else { - coll.remove(block); - promises.push(fs.remove(block.uri)); - } - } - return BbPromise.all(promises) - .then(() => { - blobProxy.original.size = totalSize; - this._clearCopyMetaData(blobProxy); - coll.update(blobProxy.release()); - resolve(new AzuriteResponse({ proxy: blobProxy, cors: request.cors })); - }); - }); - combinedStream.pipe(destinationStream); + .indexOf(block.blockId) !== -1 + ) { + block.committed = true; + totalSize += block.size; + coll.update(block); + } else { + coll.remove(block); + promises.push(fs.remove(block.uri)); + } + } + return BbPromise.all(promises).then(() => { + blobProxy.original.size = totalSize; + this._clearCopyMetaData(blobProxy); + 
coll.update(blobProxy.release()); + resolve( + new AzuriteResponse({ proxy: blobProxy, cors: request.cors }) + ); + }); }); - } + combinedStream.pipe(destinationStream); + }); + } - getBlockList(request) { - const coll = this.db.getCollection(request.containerName), - blocks = coll.chain() - .find({ parentId: request.id }) - .data(); + getBlockList(request) { + const coll = this.db.getCollection(request.containerName), + blocks = coll + .chain() + .find({ parentId: request.id }) + .data(); - const { blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - const response = new AzuriteResponse({ proxy: blobProxy, payload: blocks, cors: request.cors }); - return BbPromise.resolve(response); - } + const { blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + const response = new AzuriteResponse({ + proxy: blobProxy, + payload: blocks, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - setBlobMetadata(request) { - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - blobProxy.original.metaProps = request.metaProps; - coll.update(blobProxy.release()); - const response = new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + setBlobMetadata(request) { + const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + blobProxy.original.metaProps = request.metaProps; + coll.update(blobProxy.release()); + const response = new AzuriteResponse({ + proxy: blobProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - getBlobMetadata(request) { - const { blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - const response = new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + getBlobMetadata(request) { + const { blobProxy } = this._getCollectionAndBlob( + request.containerName, + 
request.id + ); + const response = new AzuriteResponse({ + proxy: blobProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - setBlobProperties(request) { - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - request.httpProps[N.CACHE_CONTROL] ? blobProxy.original.cacheControl = request.httpProps[N.CACHE_CONTROL] : delete blobProxy.original.cacheControl; - request.httpProps[N.CONTENT_TYPE] ? blobProxy.original.contentType = request.httpProps[N.CONTENT_TYPE] : delete blobProxy.original.contentType; - request.httpProps[N.CONTENT_ENCODING] ? blobProxy.original.contentEncoding = request.httpProps[N.CONTENT_ENCODING] : delete blobProxy.original.contentEncoding; - request.httpProps[N.CONTENT_LANGUAGE] ? blobProxy.original.contentLanguage = request.httpProps[N.CONTENT_LANGUAGE] : delete blobProxy.original.contentLanguage; - request.httpProps[N.CONTENT_DISPOSITION] ? blobProxy.original.contentDisposition = request.httpProps[N.CONTENT_DISPOSITION] : delete blobProxy.original.contentDisposition; - request.httpProps[N.CONTENT_MD5] ? blobProxy.original.md5 = request.httpProps[N.CONTENT_MD5] : request.calculateContentMd5(); - this._clearCopyMetaData(blobProxy); - coll.update(blobProxy.release()); - const response = new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + setBlobProperties(request) { + const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + request.httpProps[N.CACHE_CONTROL] + ? (blobProxy.original.cacheControl = request.httpProps[N.CACHE_CONTROL]) + : delete blobProxy.original.cacheControl; + request.httpProps[N.CONTENT_TYPE] + ? (blobProxy.original.contentType = request.httpProps[N.CONTENT_TYPE]) + : delete blobProxy.original.contentType; + request.httpProps[N.CONTENT_ENCODING] + ? 
(blobProxy.original.contentEncoding = + request.httpProps[N.CONTENT_ENCODING]) + : delete blobProxy.original.contentEncoding; + request.httpProps[N.CONTENT_LANGUAGE] + ? (blobProxy.original.contentLanguage = + request.httpProps[N.CONTENT_LANGUAGE]) + : delete blobProxy.original.contentLanguage; + request.httpProps[N.CONTENT_DISPOSITION] + ? (blobProxy.original.contentDisposition = + request.httpProps[N.CONTENT_DISPOSITION]) + : delete blobProxy.original.contentDisposition; + request.httpProps[N.CONTENT_MD5] + ? (blobProxy.original.md5 = request.httpProps[N.CONTENT_MD5]) + : request.calculateContentMd5(); + this._clearCopyMetaData(blobProxy); + coll.update(blobProxy.release()); + const response = new AzuriteResponse({ + proxy: blobProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - getBlobProperties(request) { - // Same OP, different response headers are filtered and processeed at handler level - return this.getBlobMetadata(request); - } + getBlobProperties(request) { + // Same OP, different response headers are filtered and processeed at handler level + return this.getBlobMetadata(request); + } - setContainerMetadata(request) { - const { coll, containerProxy } = this._getCollectionAndContainer(request.containerName); - containerProxy.original.metaProps = request.metaProps; - coll.update(containerProxy.release()); - const response = new AzuriteResponse({ proxy: containerProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + setContainerMetadata(request) { + const { coll, containerProxy } = this._getCollectionAndContainer( + request.containerName + ); + containerProxy.original.metaProps = request.metaProps; + coll.update(containerProxy.release()); + const response = new AzuriteResponse({ + proxy: containerProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - getContainerMetadata(request) { - const { containerProxy } = this._getCollectionAndContainer(request.containerName); - const response = new 
AzuriteResponse({ proxy: containerProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + getContainerMetadata(request) { + const { containerProxy } = this._getCollectionAndContainer( + request.containerName + ); + const response = new AzuriteResponse({ + proxy: containerProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - getContainerProperties(request) { - const { containerProxy } = this._getCollectionAndContainer(request.containerName); - const response = new AzuriteResponse({ proxy: containerProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + getContainerProperties(request) { + const { containerProxy } = this._getCollectionAndContainer( + request.containerName + ); + const response = new AzuriteResponse({ + proxy: containerProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - putPage(request) { - const parts = request.httpProps[N.RANGE].split('=')[1].split('-'), - startByte = parseInt(parts[0]), - endByte = parseInt(parts[1]); - // Getting overlapping pages (sorted by startByte in ascending order) - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - const pageRanges = coll.chain() - .find({ - '$and': [ - { 'end': { '$gte': startByte / 512 } }, - { 'start': { '$lte': (endByte + 1) / 512 } }, - { 'parentId': { '$eq': request.id } }] - }) - .sort((a, b) => { - return a.start - b.start; - }) - .data(); + putPage(request) { + const parts = request.httpProps[N.RANGE].split("=")[1].split("-"), + startByte = parseInt(parts[0]), + endByte = parseInt(parts[1]); + // Getting overlapping pages (sorted by startByte in ascending order) + const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + const pageRanges = coll + .chain() + .find({ + $and: [ + { end: { $gte: startByte / 512 } }, + { start: { $lte: (endByte + 1) / 512 } }, + { parentId: { $eq: request.id } }, + ], + }) + .sort((a, b) => { + 
return a.start - b.start; + }) + .data(); - const pageWriteMode = request.httpProps[N.PAGE_WRITE]; - const isClear = pageWriteMode.toLowerCase() === 'clear'; + const pageWriteMode = request.httpProps[N.PAGE_WRITE]; + const isClear = pageWriteMode.toLowerCase() === "clear"; - this._updatePageRanges(coll, pageRanges, startByte, endByte, request.id, isClear); + this._updatePageRanges( + coll, + pageRanges, + startByte, + endByte, + request.id, + isClear + ); - const writeStream = fs.createWriteStream(request.uri, { - flags: 'r+', - start: startByte, - defaultEncoding: 'utf8' - }); + const writeStream = fs.createWriteStream(request.uri, { + flags: "r+", + start: startByte, + defaultEncoding: "utf8", + }); - let pageContent; - if (isClear) { - // Zeroes will be written to the file to scrub the data - pageContent = new Array(endByte - startByte + 1).fill('\0').join(''); - } else { - // Must be an update operation because it has already been verified in - // PageBlobHeaderSanity that the write mode is either 'clear' or 'update'. - // The request data will be written to the file - pageContent = request.body; - } + let pageContent; + if (isClear) { + // Zeroes will be written to the file to scrub the data + pageContent = new Array(endByte - startByte + 1).fill("\0").join(""); + } else { + // Must be an update operation because it has already been verified in + // PageBlobHeaderSanity that the write mode is either 'clear' or 'update'. 
+ // The request data will be written to the file + pageContent = request.body; + } - return new BbPromise((resolve, reject) => { - writeStream - .on('error', (e) => { - reject(e); - }) - .on('finish', () => { - // Fixme: Use async / non-blocking method instead - blobProxy.original.size = fsn.statSync(request.uri).size; - blobProxy.original.sequenceNumber++; - coll.update(blobProxy.release()); - const response = new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - resolve(response); - }); - writeStream.write(pageContent); - writeStream.end(); + return new BbPromise((resolve, reject) => { + writeStream + .on("error", (e) => { + reject(e); + }) + .on("finish", () => { + // Fixme: Use async / non-blocking method instead + blobProxy.original.size = fsn.statSync(request.uri).size; + blobProxy.original.sequenceNumber++; + coll.update(blobProxy.release()); + const response = new AzuriteResponse({ + proxy: blobProxy, + cors: request.cors, + }); + resolve(response); }); - } + writeStream.write(pageContent); + writeStream.end(); + }); + } - getPageRanges(request) { - let pageRanges; - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - if (request.httpProps[N.RANGE]) { - // If range exists it is guaranteed to be well-formed due to PageAlignment validation - const parts = request.httpProps[N.RANGE].split('=')[1].split('-'), - startByte = parseInt(parts[0]), - endByte = parseInt(parts[1]), - startAlignment = startByte / 512, - endAlignment = (endByte + 1) / 512; - - pageRanges = coll.chain() - .find({ - '$and': [ - { 'end': { '$gt': startAlignment } }, - { 'start': { '$lt': endAlignment } }, - { 'parentId': { '$eq': request.id } }] - }) - .sort((a, b) => { - return a.start - b.start; - }) - .data(); + getPageRanges(request) { + let pageRanges; + const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + if (request.httpProps[N.RANGE]) { + // If range exists it is guaranteed to be 
well-formed due to PageAlignment validation + const parts = request.httpProps[N.RANGE].split("=")[1].split("-"), + startByte = parseInt(parts[0]), + endByte = parseInt(parts[1]), + startAlignment = startByte / 512, + endAlignment = (endByte + 1) / 512; - // Trim the page ranges being returned to fit inside the request range. - const firstPage = pageRanges[0]; - const lastPage = pageRanges[pageRanges.length - 1]; - if (firstPage && firstPage.start < startAlignment) { - firstPage.start = startAlignment; - } - if (lastPage && lastPage.end > endAlignment) { - lastPage.end = endAlignment; - } - } else { - pageRanges = coll.chain() - .find({ 'parentId': { '$eq': request.id } }) - .sort((a, b) => { - return a.start - b.start; - }) - .data(); - } + pageRanges = coll + .chain() + .find({ + $and: [ + { end: { $gt: startAlignment } }, + { start: { $lt: endAlignment } }, + { parentId: { $eq: request.id } }, + ], + }) + .sort((a, b) => { + return a.start - b.start; + }) + .data(); - const response = new AzuriteResponse({ proxy: blobProxy, payload: pageRanges, cors: request.cors }); - return BbPromise.resolve(response); + // Trim the page ranges being returned to fit inside the request range. 
+ const firstPage = pageRanges[0]; + const lastPage = pageRanges[pageRanges.length - 1]; + if (firstPage && firstPage.start < startAlignment) { + firstPage.start = startAlignment; + } + if (lastPage && lastPage.end > endAlignment) { + lastPage.end = endAlignment; + } + } else { + pageRanges = coll + .chain() + .find({ parentId: { $eq: request.id } }) + .sort((a, b) => { + return a.start - b.start; + }) + .data(); } - setContainerAcl(request) { - const { coll, containerProxy } = this._getCollectionAndContainer(request.containerName); - containerProxy.original.signedIdentifiers = request.payload; - containerProxy.original.access = request.httpProps[N.BLOB_PUBLIC_ACCESS]; - coll.update(containerProxy.release()); - const response = new AzuriteResponse({ proxy: containerProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + const response = new AzuriteResponse({ + proxy: blobProxy, + payload: pageRanges, + cors: request.cors, + }); + return BbPromise.resolve(response); + } + + setContainerAcl(request) { + const { coll, containerProxy } = this._getCollectionAndContainer( + request.containerName + ); + containerProxy.original.signedIdentifiers = request.payload; + containerProxy.original.access = request.httpProps[N.BLOB_PUBLIC_ACCESS]; + coll.update(containerProxy.release()); + const response = new AzuriteResponse({ + proxy: containerProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - getContainerAcl(request) { - const { containerProxy } = this._getCollectionAndContainer(request.containerName); - const response = new AzuriteResponse({ proxy: containerProxy, cors: request.cors }); - return BbPromise.resolve(response); + getContainerAcl(request) { + const { containerProxy } = this._getCollectionAndContainer( + request.containerName + ); + const response = new AzuriteResponse({ + proxy: containerProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } + + snapshotBlob(request) { + const { coll, blobProxy } = 
this._getCollectionAndBlob( + request.containerName, + request.id + ); + const snapshotEntity = StorageEntityGenerator.clone(blobProxy.original); + const snapshotDate = SnapshotTimeManager.getDate( + request.id, + new Date(request.now) + ); + snapshotEntity.snapshot = true; + snapshotEntity.snapshotDate = snapshotDate.toUTCString(); + snapshotEntity.originId = request.id; + snapshotEntity.originUri = request.uri; + snapshotEntity.id = env.snapshotId( + request.containerName, + request.blobName, + snapshotEntity.snapshotDate + ); // Updating ID due to possibly changed snapshot date + snapshotEntity.uri = env.diskStorageUri(snapshotEntity.id); + const snapshotProxy = new BlobProxy( + coll.insert(snapshotEntity), + request.containerName + ); + if (Object.keys(request.metaProps).length > 0) { + snapshotProxy.original.metaProps = request.metaProps; + // The etag ans last-modified of the snapshot only changes from the original if metadata was added + snapshotProxy.updateETag(); + } else { + snapshotProxy.original.meta.updated = blobProxy.original.meta.updated; + snapshotProxy.original.meta.created = blobProxy.original.meta.created; } + return fs.copy(request.uri, snapshotProxy.original.uri).then(() => { + const response = new AzuriteResponse({ + proxy: snapshotProxy, + cors: request.cors, + }); + return response; + }); + } + + leaseContainer(request) { + const leaseAction = request.httpProps[N.LEASE_ACTION], + proposedLeaseId = request.httpProps[N.PROPOSED_LEASE_ID], + leaseId = request.httpProps[N.LEASE_ID], + leaseBreakPeriod = request.httpProps[N.LEASE_BREAK_PERIOD] + ? parseInt(request.httpProps[N.LEASE_BREAK_PERIOD]) + : undefined, + leaseDuration = request.httpProps[N.LEASE_DURATION] + ? 
parseInt(request.httpProps[N.LEASE_DURATION]) + : undefined; + const { coll, containerProxy } = this._getCollectionAndContainer( + request.containerName + ); + const now = request.now; - snapshotBlob(request) { - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - const snapshotEntity = StorageEntityGenerator.clone(blobProxy.original); - const snapshotDate = SnapshotTimeManager.getDate(request.id, new Date(request.now)); - snapshotEntity.snapshot = true; - snapshotEntity.snapshotDate = snapshotDate.toUTCString(); - snapshotEntity.originId = request.id; - snapshotEntity.originUri = request.uri; - snapshotEntity.id = env.snapshotId(request.containerName, request.blobName, snapshotEntity.snapshotDate); // Updating ID due to possibly changed snapshot date - snapshotEntity.uri = env.diskStorageUri(snapshotEntity.id); - const snapshotProxy = new BlobProxy(coll.insert(snapshotEntity), request.containerName); - if (Object.keys(request.metaProps).length > 0) { - snapshotProxy.original.metaProps = request.metaProps; - // The etag ans last-modified of the snapshot only changes from the original if metadata was added - snapshotProxy.updateETag(); + switch (leaseAction) { + case LeaseActions.ACQUIRE: + containerProxy.original.leaseId = proposedLeaseId || uuidv4(); + containerProxy.original.leaseExpiredAt = + leaseDuration === -1 ? -1 : now + leaseDuration * 1000; + containerProxy.original.leaseDuration = leaseDuration; + containerProxy.original.leaseState = LeaseStatus.LEASED; + break; + case LeaseActions.RENEW: + containerProxy.original.leaseExpiredAt = + containerProxy.original.leaseDuration === -1 + ? 
-1 + : now + containerProxy.original.leaseDuration * 1000; + break; + case LeaseActions.CHANGE: + containerProxy.original.leaseId = proposedLeaseId; + break; + case LeaseActions.RELEASE: + containerProxy.original.leaseState = LeaseStatus.AVAILABLE; + break; + case LeaseActions.BREAK: + if (leaseBreakPeriod === undefined) { + containerProxy.original.leaseBrokenAt = + containerProxy.original.leaseExpiredAt === -1 + ? now + : containerProxy.original.leaseExpiredAt; + } else if (containerProxy.original.leaseExpiredAt === -1) { + containerProxy.original.leaseBrokenAt = now + leaseBreakPeriod * 1000; } else { - snapshotProxy.original.meta.updated = blobProxy.original.meta.updated; - snapshotProxy.original.meta.created = blobProxy.original.meta.created; + const span = containerProxy.original.leaseExpiredAt - now; + containerProxy.original.leaseBrokenAt = + span > leaseBreakPeriod * 1000 + ? (containerProxy.original.leaseBrokenAt = + now + leaseBreakPeriod * 1000) + : (containerProxy.original.leaseBrokenAt = + containerProxy.original.leaseExpiredAt); } - return fs.copy(request.uri, snapshotProxy.original.uri) - .then(() => { - const response = new AzuriteResponse({ proxy: snapshotProxy, cors: request.cors }); - return response; - }); + containerProxy.original.leaseState = LeaseStatus.BREAKING; + break; + default: + // This should never happen due to preceding validation! + throw new Error( + `*INTERNAL ERROR*: leaseContainer: Invalid Lease Action "${leaseAction}"` + ); } + coll.update(containerProxy.release()); + const response = new AzuriteResponse({ + proxy: containerProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - leaseContainer(request) { - const leaseAction = request.httpProps[N.LEASE_ACTION], - proposedLeaseId = request.httpProps[N.PROPOSED_LEASE_ID], - leaseId = request.httpProps[N.LEASE_ID], - leaseBreakPeriod = (request.httpProps[N.LEASE_BREAK_PERIOD]) ? 
parseInt(request.httpProps[N.LEASE_BREAK_PERIOD]) : undefined, - leaseDuration = (request.httpProps[N.LEASE_DURATION]) ? parseInt(request.httpProps[N.LEASE_DURATION]) : undefined; - const { coll, containerProxy } = this._getCollectionAndContainer(request.containerName); - const now = request.now; - - switch (leaseAction) { - case LeaseActions.ACQUIRE: - containerProxy.original.leaseId = proposedLeaseId || uuidv4(); - containerProxy.original.leaseExpiredAt = (leaseDuration === -1) ? -1 : now + leaseDuration * 1000; - containerProxy.original.leaseDuration = leaseDuration; - containerProxy.original.leaseState = LeaseStatus.LEASED; - break; - case LeaseActions.RENEW: - containerProxy.original.leaseExpiredAt = (containerProxy.original.leaseDuration === -1) ? -1 : now + containerProxy.original.leaseDuration * 1000; - break; - case LeaseActions.CHANGE: - containerProxy.original.leaseId = proposedLeaseId; - break; - case LeaseActions.RELEASE: - containerProxy.original.leaseState = LeaseStatus.AVAILABLE; - break; - case LeaseActions.BREAK: - if (leaseBreakPeriod === undefined) { - containerProxy.original.leaseBrokenAt = (containerProxy.original.leaseExpiredAt === -1) ? now : containerProxy.original.leaseExpiredAt; - } else if (containerProxy.original.leaseExpiredAt === -1) { - containerProxy.original.leaseBrokenAt = now + leaseBreakPeriod * 1000; - } else { - const span = containerProxy.original.leaseExpiredAt - now; - containerProxy.original.leaseBrokenAt = (span > leaseBreakPeriod * 1000) - ? containerProxy.original.leaseBrokenAt = now + leaseBreakPeriod * 1000 - : containerProxy.original.leaseBrokenAt = containerProxy.original.leaseExpiredAt; - } - containerProxy.original.leaseState = LeaseStatus.BREAKING; - break; - default: - // This should never happen due to preceding validation! 
- throw new Error(`*INTERNAL ERROR*: leaseContainer: Invalid Lease Action "${leaseAction}"`); - } - coll.update(containerProxy.release()); - const response = new AzuriteResponse({ proxy: containerProxy, cors: request.cors }); - return BbPromise.resolve(response); - } + leaseBlob(request) { + const leaseAction = request.httpProps[N.LEASE_ACTION], + proposedLeaseId = request.httpProps[N.PROPOSED_LEASE_ID], + leaseId = request.httpProps[N.LEASE_ID], + leaseBreakPeriod = request.httpProps[N.LEASE_BREAK_PERIOD] + ? parseInt(request.httpProps[N.LEASE_BREAK_PERIOD]) + : undefined, + leaseDuration = request.httpProps[N.LEASE_DURATION] + ? parseInt(request.httpProps[N.LEASE_DURATION]) + : undefined; + const { coll, blobProxy } = this._getCollectionAndBlob( + request.containerName, + request.id + ); + const now = request.now; - leaseBlob(request) { - const leaseAction = request.httpProps[N.LEASE_ACTION], - proposedLeaseId = request.httpProps[N.PROPOSED_LEASE_ID], - leaseId = request.httpProps[N.LEASE_ID], - leaseBreakPeriod = (request.httpProps[N.LEASE_BREAK_PERIOD]) ? parseInt(request.httpProps[N.LEASE_BREAK_PERIOD]) : undefined, - leaseDuration = (request.httpProps[N.LEASE_DURATION]) ? parseInt(request.httpProps[N.LEASE_DURATION]) : undefined; - const { coll, blobProxy } = this._getCollectionAndBlob(request.containerName, request.id); - const now = request.now; - - switch (leaseAction) { - case LeaseActions.ACQUIRE: - blobProxy.original.leaseId = proposedLeaseId || uuidv4(); - blobProxy.original.leaseExpiredAt = (leaseDuration === -1) ? -1 : now + leaseDuration * 1000; - blobProxy.original.leaseDuration = leaseDuration; - blobProxy.original.leaseState = LeaseStatus.LEASED; - blobProxy.original.leaseETag = blobProxy.original.etag; - break; - case LeaseActions.RENEW: - blobProxy.original.leaseExpiredAt = (blobProxy.original.leaseDuration === -1) ? 
-1 : now + blobProxy.original.leaseDuration * 1000; - break; - case LeaseActions.CHANGE: - blobProxy.original.leaseId = proposedLeaseId; - break; - case LeaseActions.RELEASE: - blobProxy.original.leaseState = LeaseStatus.AVAILABLE; - break; - case LeaseActions.BREAK: - if (leaseBreakPeriod === undefined) { - blobProxy.original.leaseBrokenAt = (blobProxy.original.leaseExpiredAt === -1) ? now : blobProxy.original.leaseExpiredAt; - } else if (blobProxy.original.leaseExpiredAt === -1) { - blobProxy.original.leaseBrokenAt = now + leaseBreakPeriod * 1000; - } else { - const span = blobProxy.original.leaseExpiredAt - now; - blobProxy.original.leaseBrokenAt = (span > leaseBreakPeriod * 1000) - ? blobProxy.original.leaseBrokenAt = now + leaseBreakPeriod * 1000 - : blobProxy.original.leaseBrokenAt = blobProxy.original.leaseExpiredAt; - } - blobProxy.original.leaseState = LeaseStatus.BREAKING; - break; - default: - // This should never happen due to preceding validation! - throw new Error(`leaseContainer: Invalid Lease Action "${leaseAction}"`); + switch (leaseAction) { + case LeaseActions.ACQUIRE: + blobProxy.original.leaseId = proposedLeaseId || uuidv4(); + blobProxy.original.leaseExpiredAt = + leaseDuration === -1 ? -1 : now + leaseDuration * 1000; + blobProxy.original.leaseDuration = leaseDuration; + blobProxy.original.leaseState = LeaseStatus.LEASED; + blobProxy.original.leaseETag = blobProxy.original.etag; + break; + case LeaseActions.RENEW: + blobProxy.original.leaseExpiredAt = + blobProxy.original.leaseDuration === -1 + ? -1 + : now + blobProxy.original.leaseDuration * 1000; + break; + case LeaseActions.CHANGE: + blobProxy.original.leaseId = proposedLeaseId; + break; + case LeaseActions.RELEASE: + blobProxy.original.leaseState = LeaseStatus.AVAILABLE; + break; + case LeaseActions.BREAK: + if (leaseBreakPeriod === undefined) { + blobProxy.original.leaseBrokenAt = + blobProxy.original.leaseExpiredAt === -1 + ? 
now + : blobProxy.original.leaseExpiredAt; + } else if (blobProxy.original.leaseExpiredAt === -1) { + blobProxy.original.leaseBrokenAt = now + leaseBreakPeriod * 1000; + } else { + const span = blobProxy.original.leaseExpiredAt - now; + blobProxy.original.leaseBrokenAt = + span > leaseBreakPeriod * 1000 + ? (blobProxy.original.leaseBrokenAt = + now + leaseBreakPeriod * 1000) + : (blobProxy.original.leaseBrokenAt = + blobProxy.original.leaseExpiredAt); } - coll.update(blobProxy.release()); - const response = new AzuriteResponse({ proxy: blobProxy, cors: request.cors }); - return BbPromise.resolve(response); + blobProxy.original.leaseState = LeaseStatus.BREAKING; + break; + default: + // This should never happen due to preceding validation! + throw new Error( + `leaseContainer: Invalid Lease Action "${leaseAction}"` + ); } + coll.update(blobProxy.release()); + const response = new AzuriteResponse({ + proxy: blobProxy, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - copyBlob(request) { - const sourceProxy = this._getCopySourceProxy(request); - let from = null, - to = null; - - from = fs.createReadStream(sourceProxy.original.uri); - // TODO: if blob type is block also copy committed blocks - to = fs.createWriteStream(env.diskStorageUri(request.id)); - from.pipe(to); - - request.entityType = sourceProxy.original.entityType; - - const coll = this.db.getCollection(request.containerName), - blobProxyDestination = this._createOrUpdateBlob(coll, request), - copyId = uuidv4(); - - blobProxyDestination.original.copyStatus = CopyStatus.PENDING; - blobProxyDestination.original.copyStatusDescription = ''; - blobProxyDestination.original.copyId = copyId; - CopyOperationsManager.add(copyId, from, to, env.diskStorageUri(request.id)); - let bytesCopied = 0; - to.on('finish', () => { - if (blobProxyDestination.original.copyStatus !== CopyStatus.FAILED) { - blobProxyDestination.original.copyCompletionTime = new Date().toGMTString(); - 
blobProxyDestination.original.copyStatus = CopyStatus.SUCCESS; - delete blobProxyDestination.original.copyStatusDescription; - blobProxyDestination.original.copySource = sourceProxy.original.uri; - const { sourceContainerName, sourceBlobName } = request.copySourceName(); - // encode blobname in case there are unicode characters which are not supported by http headers per default - blobProxyDestination.original.copySource = `http://localhost/devstoreaccount1/${sourceContainerName}/${encodeURIComponent(sourceBlobName)}`; - blobProxyDestination.original.incrementalCopy = false; - blobProxyDestination.original.size = sourceProxy.original.size; - blobProxyDestination.original.entityType = sourceProxy.original.entityType; - blobProxyDestination.original.md5 = sourceProxy.original.md5; - blobProxyDestination.original.metaProps = (Object.keys(request.metaProps).length > 0) - ? request.metaProps - : sourceProxy.original.metaProps; - if (sourceProxy.original.entityType === StorageEntityType.PageBlob) { - blobProxyDestination.original.sequenceNumber = sourceProxy.original.sequenceNumber; - } - if (sourceProxy.original.entityType === StorageEntityType.AppendBlob) { - blobProxyDestination.original[N.BLOB_COMMITTED_BLOCK_COUNT] = sourceProxy.original[N.BLOB_COMMITTED_BLOCK_COUNT]; - } - CopyOperationsManager.clear(copyId); - coll.update(blobProxyDestination.release()); - } - }); - from.on('data', (chunk) => { - bytesCopied += chunk.length; - blobProxyDestination.original.copyProgress = `${bytesCopied}/${sourceProxy.original.size}`; - }); - to.on('error', (err) => { - blobProxyDestination.original.copyStatus = CopyStatus.FAILED; - blobProxyDestination.original.copyStatusDescription = err.message; - blobProxyDestination.original.completionTime = new Date().toGMTString(); - CopyOperationsManager.clear(copyId); - to.end(); - }); + copyBlob(request) { + const sourceProxy = this._getCopySourceProxy(request); + let from = null, + to = null; - const response = new AzuriteResponse({ 
proxy: blobProxyDestination, cors: request.cors }); - return BbPromise.resolve(response); - } + from = fs.createReadStream(sourceProxy.original.uri); + // TODO: if blob type is block also copy committed blocks + to = fs.createWriteStream(env.diskStorageUri(request.id)); + from.pipe(to); - abortCopyBlob(request) { - return CopyOperationsManager.cancel(request.copyId) - .then(() => { - return new AzuriteResponse({ cors: request.cors }); - }); - } + request.entityType = sourceProxy.original.entityType; - setBlobServiceProperties(request) { - const coll = this.db.getCollection(StorageTables.ServiceProperties); - const settings = coll.where((e) => { - return true; // there is always at most one entry in this collection - })[0]; - const updatedSettings = request.payload.StorageServiceProperties; - if (!settings) { - coll.insert(request.payload); - } else { - if (updatedSettings.Logging) { - settings.StorageServiceProperties.Logging = updatedSettings.Logging; - } - if (updatedSettings.HourMetrics) { - settings.StorageServiceProperties.HourMetrics = updatedSettings.HourMetrics; - } - if (updatedSettings.MinuteMetrics) { - settings.StorageServiceProperties.MinuteMetrics = updatedSettings.MinuteMetrics; - } - if (updatedSettings.Cors) { - settings.StorageServiceProperties.Cors = updatedSettings.Cors; - } - if (updatedSettings.DefaultServiceVersion) { - settings.StorageServiceProperties.DefaultServiceVersion = updatedSettings.DefaultServiceVersion; - } - coll.update(settings); + const coll = this.db.getCollection(request.containerName), + blobProxyDestination = this._createOrUpdateBlob(coll, request), + copyId = uuidv4(); + + blobProxyDestination.original.copyProgress = `0/${ + sourceProxy.original.size + }`; + blobProxyDestination.original.copyStatus = CopyStatus.PENDING; + blobProxyDestination.original.copyStatusDescription = ""; + blobProxyDestination.original.copyId = copyId; + CopyOperationsManager.add(copyId, from, to, env.diskStorageUri(request.id)); + let bytesCopied 
= 0; + to.on("finish", () => { + if (blobProxyDestination.original.copyStatus !== CopyStatus.FAILED) { + blobProxyDestination.original.copyProgress = `${ + sourceProxy.original.size + }/${sourceProxy.original.size}`; + blobProxyDestination.original.copyCompletionTime = new Date().toGMTString(); + blobProxyDestination.original.copyStatus = CopyStatus.SUCCESS; + delete blobProxyDestination.original.copyStatusDescription; + blobProxyDestination.original.copySource = sourceProxy.original.uri; + const { + sourceContainerName, + sourceBlobName, + } = request.copySourceName(); + // encode blobname in case there are unicode characters which are not supported by http headers per default + blobProxyDestination.original.copySource = `http://localhost/devstoreaccount1/${sourceContainerName}/${encodeURIComponent( + sourceBlobName + )}`; + blobProxyDestination.original.incrementalCopy = false; + blobProxyDestination.original.size = sourceProxy.original.size; + blobProxyDestination.original.entityType = + sourceProxy.original.entityType; + blobProxyDestination.original.md5 = sourceProxy.original.md5; + blobProxyDestination.original.contentType = + sourceProxy.original.contentType; + blobProxyDestination.original.contentEncoding = + sourceProxy.original.contentEncoding; + blobProxyDestination.original.contentLanguage = + sourceProxy.original.contentLanguage; + blobProxyDestination.original.cacheControl = + sourceProxy.original.cacheControl; + blobProxyDestination.original.contentDisposition = + sourceProxy.original.contentDisposition; + blobProxyDestination.original.metaProps = + Object.keys(request.metaProps).length > 0 + ? 
request.metaProps + : sourceProxy.original.metaProps; + if (sourceProxy.original.entityType === StorageEntityType.PageBlob) { + blobProxyDestination.original.sequenceNumber = + sourceProxy.original.sequenceNumber; } - return BbPromise.resolve(new AzuriteResponse({ cors: request.cors })); - } + if (sourceProxy.original.entityType === StorageEntityType.AppendBlob) { + blobProxyDestination.original[N.BLOB_COMMITTED_BLOCK_COUNT] = + sourceProxy.original[N.BLOB_COMMITTED_BLOCK_COUNT]; + } + CopyOperationsManager.clear(copyId); + coll.update(blobProxyDestination.release()); + } + }); + from.on("data", (chunk) => { + bytesCopied += chunk.length; + blobProxyDestination.original.copyProgress = `${bytesCopied}/${ + sourceProxy.original.size + }`; + }); + to.on("error", (err) => { + blobProxyDestination.original.copyStatus = CopyStatus.FAILED; + blobProxyDestination.original.copyStatusDescription = err.message; + blobProxyDestination.original.completionTime = new Date().toGMTString(); + CopyOperationsManager.clear(copyId); + to.end(); + }); - getBlobServiceProperties(request) { - const coll = this.db.getCollection(StorageTables.ServiceProperties); - const settings = coll.where((e) => { - return true; // there is always at most one entry in this collection - })[0]; + const response = new AzuriteResponse({ + proxy: blobProxyDestination, + cors: request.cors, + }); + return BbPromise.resolve(response); + } - return BbPromise.resolve(new AzuriteResponse({ payload: settings || {}, cors: request.cors })); - } + abortCopyBlob(request) { + return CopyOperationsManager.cancel(request.copyId).then(() => { + return new AzuriteResponse({ cors: request.cors }); + }); + } - _createOrUpdateBlob(coll, request) { - const blob = coll.chain().find({ 'id': { '$eq': request.id } }).data(); - if (blob.length > 0) { - coll.chain().find({ 'id': { '$eq': request.id } }).remove(); - } - const entity = StorageEntityGenerator.generateStorageEntity(request); - const blobProxy = new 
BlobProxy(coll.insert(entity), request.containerName); - coll.update(blobProxy.release()); - return blobProxy; + setBlobServiceProperties(request) { + const coll = this.db.getCollection(StorageTables.ServiceProperties); + const settings = coll.where((e) => { + return true; // there is always at most one entry in this collection + })[0]; + const updatedSettings = request.payload.StorageServiceProperties; + if (!settings) { + coll.insert(request.payload); + } else { + if (updatedSettings.Logging) { + settings.StorageServiceProperties.Logging = updatedSettings.Logging; + } + if (updatedSettings.HourMetrics) { + settings.StorageServiceProperties.HourMetrics = + updatedSettings.HourMetrics; + } + if (updatedSettings.MinuteMetrics) { + settings.StorageServiceProperties.MinuteMetrics = + updatedSettings.MinuteMetrics; + } + if (updatedSettings.Cors) { + settings.StorageServiceProperties.Cors = updatedSettings.Cors; + } + if (updatedSettings.DefaultServiceVersion) { + settings.StorageServiceProperties.DefaultServiceVersion = + updatedSettings.DefaultServiceVersion; + } + coll.update(settings); } + return BbPromise.resolve(new AzuriteResponse({ cors: request.cors })); + } - /** - * Precondition: Validation of container and blob - * - * @param {String} containerName - * @param {String} id - * @returns - * - * @memberOf StorageManager - */ - _getCollectionAndBlob(containerName, id) { - const coll = this.db.getCollection(containerName); - if (!coll) { - return { - coll: undefined, - blobProxy: undefined - }; - } - const result = coll.chain() - .find({ id: id }) - .data(); - return { - coll: coll, - blobProxy: (result.length === 0) ? 
undefined : new BlobProxy(result[0], containerName) - }; + getBlobServiceProperties(request) { + const coll = this.db.getCollection(StorageTables.ServiceProperties); + const settings = coll.where((e) => { + return true; // there is always at most one entry in this collection + })[0]; + + return BbPromise.resolve( + new AzuriteResponse({ payload: settings || {}, cors: request.cors }) + ); + } + + _createOrUpdateBlob(coll, request) { + const blob = coll + .chain() + .find({ id: { $eq: request.id } }) + .data(); + if (blob.length > 0) { + coll + .chain() + .find({ id: { $eq: request.id } }) + .remove(); } + const entity = StorageEntityGenerator.generateStorageEntity(request); + const blobProxy = new BlobProxy(coll.insert(entity), request.containerName); + coll.update(blobProxy.release()); + return blobProxy; + } - /** - * Precondition: Validation of container - * - * @param {String} containerName - * @returns - * - * @memberOf StorageManager - */ - _getCollectionAndContainer(containerName) { - const coll = this.db.getCollection(StorageTables.Containers); - const result = coll.chain() - .find({ name: containerName }) - .data(); - return { - coll: coll, - containerProxy: (result.length === 0) ? undefined : new ContainerProxy(result[0]) - }; + /** + * Precondition: Validation of container and blob + * + * @param {String} containerName + * @param {String} id + * @returns + * + * @memberOf StorageManager + */ + _getCollectionAndBlob(containerName, id) { + const coll = this.db.getCollection(containerName); + if (!coll) { + return { + coll: undefined, + blobProxy: undefined, + }; } + const result = coll + .chain() + .find({ id: id }) + .data(); + return { + coll: coll, + blobProxy: + result.length === 0 + ? 
undefined + : new BlobProxy(result[0], containerName), + }; + } - _updatePageRanges(coll, pageRanges, startByte, endByte, id, isClear) { - const startAlignment = startByte / 512, - endAlignment = (endByte + 1) / 512; - coll.remove(pageRanges); - const firstPage = pageRanges[0]; - const lastPage = pageRanges[pageRanges.length - 1]; - if (isClear) { - // it's a clear operation - if (firstPage && startAlignment > firstPage.start) { - coll.insert({ - parentId: id, - start: firstPage.start, - end: startAlignment - }) - } - if (lastPage && endAlignment < lastPage.end) { - coll.insert({ - parentId: id, - start: endAlignment, - end: lastPage.end - }) - } - } else { - // it must be an update operation - let start, end - if (firstPage && startAlignment > firstPage.start) { - start = firstPage.start; - } else { - start = startAlignment; - } - if (lastPage && endAlignment < lastPage.end) { - end = lastPage.end; - } else { - end = endAlignment; - } + /** + * Precondition: Validation of container + * + * @param {String} containerName + * @returns + * + * @memberOf StorageManager + */ + _getCollectionAndContainer(containerName) { + const coll = this.db.getCollection(StorageTables.Containers); + const result = coll + .chain() + .find({ name: containerName }) + .data(); + return { + coll: coll, + containerProxy: + result.length === 0 ? 
undefined : new ContainerProxy(result[0]), + }; + } - coll.insert({ - parentId: id, - start: start, - end: end - }); - } - } + _updatePageRanges(coll, pageRanges, startByte, endByte, id, isClear) { + const startAlignment = startByte / 512, + endAlignment = (endByte + 1) / 512; + coll.remove(pageRanges); + const firstPage = pageRanges[0]; + const lastPage = pageRanges[pageRanges.length - 1]; + if (isClear) { + // it's a clear operation + if (firstPage && startAlignment > firstPage.start) { + coll.insert({ + parentId: id, + start: firstPage.start, + end: startAlignment, + }); + } + if (lastPage && endAlignment < lastPage.end) { + coll.insert({ + parentId: id, + start: endAlignment, + end: lastPage.end, + }); + } + } else { + // it must be an update operation + let start, end; + if (firstPage && startAlignment > firstPage.start) { + start = firstPage.start; + } else { + start = startAlignment; + } + if (lastPage && endAlignment < lastPage.end) { + end = lastPage.end; + } else { + end = endAlignment; + } - _getCopySourceProxy(request) { - // const { sourceContainerName, sourceBlobName, date } = request.copySourceName(); - const resp = request.copySourceName(), - sourceContainerName = resp.sourceContainerName, - sourceBlobName = resp.sourceBlobName, - date = resp.date; - if (date !== undefined) { - const blobProxy = this._getCollectionAndBlob(sourceContainerName, env.snapshotId(sourceContainerName, sourceBlobName, date)).blobProxy; - return blobProxy; - } - const blobProxy = this._getCollectionAndBlob(sourceContainerName, env.blobId(sourceContainerName, sourceBlobName)).blobProxy; - return blobProxy; + coll.insert({ + parentId: id, + start: start, + end: end, + }); } + } - _clearCopyMetaData(proxy) { - delete proxy.original.copyId; - delete proxy.original.copyStatus; - delete proxy.original.copyCompletionTime; - delete proxy.original.copyStatusDescription; - delete proxy.original.copyProgress; - delete proxy.original.copySource; - delete proxy.original.incrementalCopy; 
- delete proxy.original.copyDestinationSnapshot; + _getCopySourceProxy(request) { + // const { sourceContainerName, sourceBlobName, date } = request.copySourceName(); + const resp = request.copySourceName(), + sourceContainerName = resp.sourceContainerName, + sourceBlobName = resp.sourceBlobName, + date = resp.date; + if (date !== undefined) { + const blobProxy = this._getCollectionAndBlob( + sourceContainerName, + env.snapshotId(sourceContainerName, sourceBlobName, date) + ).blobProxy; + return blobProxy; } + const blobProxy = this._getCollectionAndBlob( + sourceContainerName, + env.blobId(sourceContainerName, sourceBlobName) + ).blobProxy; + return blobProxy; + } + + _clearCopyMetaData(proxy) { + delete proxy.original.copyId; + delete proxy.original.copyStatus; + delete proxy.original.copyCompletionTime; + delete proxy.original.copyStatusDescription; + delete proxy.original.copyProgress; + delete proxy.original.copySource; + delete proxy.original.incrementalCopy; + delete proxy.original.copyDestinationSnapshot; + } } -module.exports = new StorageManager; +module.exports = new StorageManager(); diff --git a/lib/core/cli.js b/lib/core/cli.js index 666ace458..aad3d714c 100644 --- a/lib/core/cli.js +++ b/lib/core/cli.js @@ -1,13 +1,14 @@ -'use strict'; +/** @format */ -const os = require('os'), - chalk = require('chalk'), - env = require('./env'); +"use strict"; + +const os = require("os"), + chalk = require("chalk"), + env = require("./env"); exports.asciiGreeting = () => { - const version = require('./../../package.json').version; - let art = -` + const version = require("./../../package.json").version; + let art = ` _______ _ (_______) (_) _ _______ _____ _ _ ____ _ _| |_ _____ @@ -15,20 +16,32 @@ exports.asciiGreeting = () => { | | | |/ __/| |_| | | | | | |_| ____| |_| |_(_____)____/|_| |_| \\__)_____) -` - art += 'Azurite, Version ' + version + os.EOL; - art += 'A lightweight server clone of Azure Storage' + os.EOL; - console.log(chalk.cyan(art)); -} +`; + art 
+= "Azurite, Version " + version + os.EOL; + art += "A lightweight server clone of Azure Storage" + os.EOL; + console.log(chalk.cyan(art)); +}; exports.blobStorageStatus = () => { - console.log(chalk.cyan(`Azure Blob Storage Emulator listening on port ${env.blobStoragePort}`)); -} + console.log( + chalk.cyan( + `Azure Blob Storage Emulator listening on port ${env.blobStoragePort}` + ) + ); +}; exports.queueStorageStatus = () => { - console.log(chalk.cyan(`Azure Queue Storage Emulator listening on port ${env.queueStoragePort}`)); -} + console.log( + chalk.cyan( + `Azure Queue Storage Emulator listening on port ${env.queueStoragePort}` + ) + ); +}; exports.tableStorageStatus = () => { - console.log(chalk.cyan(`Azure Table Storage Emulator listening on port ${env.tableStoragePort}`)); -} \ No newline at end of file + console.log( + chalk.cyan( + `Azure Table Storage Emulator listening on port ${env.tableStoragePort}` + ) + ); +}; diff --git a/lib/core/env.js b/lib/core/env.js index ad01bb5d1..3b674cbee 100644 --- a/lib/core/env.js +++ b/lib/core/env.js @@ -1,81 +1,106 @@ -'use strict'; +/** @format */ -const utils = require('./utils'), - path = require('path'), - BbPromise = require('bluebird'), - crypto = require('crypto'), - fs = BbPromise.promisifyAll(require("fs-extra")); +"use strict"; + +const utils = require("./utils"), + path = require("path"), + BbPromise = require("bluebird"), + crypto = require("crypto"), + fs = BbPromise.promisifyAll(require("fs-extra")); let initialized = false; class Environment { - constructor() { - } + constructor() {} - init(options) { - if (initialized && !options.overwrite) { - return BbPromise.resolve(); - } - initialized = true; - this.azuriteWorkspacePath = options.l || options.location || process.cwd(); - this.azuriteRootPath = path.join(__dirname, '../..'); - this.silent = options.s || options.silent; - this.accountAuth = options.a || options.accountAuth; - this.dbNameBlob = '__azurite_db_blob__.json'; - this.dbNameTable = 
'__azurite_db_table__.json'; - this.localStoragePath = path.join(this.azuriteWorkspacePath, '__blobstorage__'); - this.azuriteDBPathBlob = path.join(this.azuriteWorkspacePath, this.dbNameBlob); - this.azuriteDBPathTable = path.join(this.azuriteWorkspacePath, this.dbNameTable); - this.emulatedStorageAccountName = 'devstoreaccount1'; - this.blobStoragePort = options.p || options.blobPort || 10000; - this.queueStoragePort = options.q || options.queuePort || 10001; - this.tableStoragePort = options.t || options.tablePort || 10002; - this.blobModulePath = path.join(this.azuriteRootPath, 'bin', 'blob'); - this.queueModulePath = path.join(this.azuriteRootPath, 'bin', 'queue'); - this.tableModulePath = path.join(this.azuriteRootPath, 'bin', 'table'); - return fs.mkdirsAsync(this.localStoragePath); + init(options) { + if (initialized && !options.overwrite) { + return BbPromise.resolve(); } + initialized = true; + this.azuriteWorkspacePath = options.l || options.location || process.cwd(); + this.azuriteRootPath = path.join(__dirname, "../.."); + this.silent = options.s || options.silent; + this.accountAuth = options.a || options.accountAuth; + this.dbNameBlob = "__azurite_db_blob__.json"; + this.dbNameTable = "__azurite_db_table__.json"; + this.localStoragePath = path.join( + this.azuriteWorkspacePath, + "__blobstorage__" + ); + this.azuriteDBPathBlob = path.join( + this.azuriteWorkspacePath, + this.dbNameBlob + ); + this.azuriteDBPathTable = path.join( + this.azuriteWorkspacePath, + this.dbNameTable + ); + this.emulatedStorageAccountName = "devstoreaccount1"; + this.blobStoragePort = options.p || options.blobPort || 10000; + this.queueStoragePort = options.q || options.queuePort || 10001; + this.tableStoragePort = options.t || options.tablePort || 10002; + this.blobModulePath = path.join(this.azuriteRootPath, "bin", "blob"); + this.queueModulePath = path.join(this.azuriteRootPath, "bin", "queue"); + this.tableModulePath = path.join(this.azuriteRootPath, "bin", "table"); + 
return fs.mkdirsAsync(this.localStoragePath); + } - /** - * Based on the request it creates the according URI that is served by Azurite's internal web interface - * directly powered by Node's static file server. - * - * The id is hashed to avoid base64-encoded filenames (i.e. ids) longer than 255 characters which is not supported on some file systems. - * - * @param {string} id of the blob - * - * @memberof Environment - * */ - webStorageUri(id) { - const hash = crypto.createHash('sha1').update(id).digest('base64').replace(/\//g, '_'); - return `http://localhost:${this.blobStoragePort}/blobs/${hash}`; - } + /** + * Based on the request it creates the according URI that is served by Azurite's internal web interface + * directly powered by Node's static file server. + * + * The id is hashed to avoid base64-encoded filenames (i.e. ids) longer than 255 characters which is not supported on some file systems. + * + * @param {string} id of the blob + * + * @memberof Environment + * */ + webStorageUri(id) { + const hash = crypto + .createHash("sha1") + .update(id) + .digest("base64") + .replace(/\//g, "_"); + return `http://localhost:${this.blobStoragePort}/blobs/${hash}`; + } - /** - * Creates the full path to the location of a blob on disk based on its ID. - * - * @param {any} id - * @returns full path to blob on disk - * @memberof Environment - */ - diskStorageUri(id) { - const hash = crypto.createHash('sha1').update(id).digest('base64').replace(/\//g, '_'); - return path.join(this.localStoragePath, hash); - } + /** + * Creates the full path to the location of a blob on disk based on its ID. + * + * @param {any} id + * @returns full path to blob on disk + * @memberof Environment + */ + diskStorageUri(id) { + const hash = crypto + .createHash("sha1") + .update(id) + .digest("base64") + .replace(/\//g, "_"); + return path.join(this.localStoragePath, hash); + } - // We prepend a specific character to guarantee unique ids. 
- // This is neccessary since otherwise snapshot IDs could overlap with block IDs could overlap with block-/append-/page-blob IDs. - blobId(containerName, blobName) { - return Buffer.from(`A${containerName}${blobName}`, 'utf8').toString('base64'); - } + // We prepend a specific character to guarantee unique ids. + // This is neccessary since otherwise snapshot IDs could overlap with block IDs could overlap with block-/append-/page-blob IDs. + blobId(containerName, blobName) { + return Buffer.from(`A${containerName}${blobName}`, "utf8").toString( + "base64" + ); + } - blockId(containerName, blobName, blockId) { - return Buffer.from(`B${containerName}${blobName}${blockId}`, 'utf8').toString('base64'); - } + blockId(containerName, blobName, blockId) { + return Buffer.from( + `B${containerName}${blobName}${blockId}`, + "utf8" + ).toString("base64"); + } - snapshotId(containerName, blobName, date) { - return Buffer.from(`C${containerName}${blobName}${date}`, 'utf8').toString('base64'); - } + snapshotId(containerName, blobName, date) { + return Buffer.from(`C${containerName}${blobName}${date}`, "utf8").toString( + "base64" + ); + } } module.exports = new Environment(); diff --git a/lib/core/queue/QueueManager.js b/lib/core/queue/QueueManager.js index ce8474e0c..8e325fcb6 100644 --- a/lib/core/queue/QueueManager.js +++ b/lib/core/queue/QueueManager.js @@ -1,61 +1,70 @@ -'use strict'; +/** @format */ -const Queue = require('./../../model/queue/Queue'); +"use strict"; + +const Queue = require("./../../model/queue/Queue"); /** * Manages the lifecycle of all queues in memory. Queues are not persisted in Azurite. 
- * + * * @class QueueManager */ class QueueManager { - constructor() { - this.queues = {}; - } + constructor() { + this.queues = {}; + } - add({ name, metaProps = {} }) { - this.queues[name] = new Queue(metaProps); - } + add({ name, metaProps = {} }) { + this.queues[name] = new Queue(metaProps); + } - delete(name) { - delete this.queues[name]; - } + delete(name) { + delete this.queues[name]; + } - getQueueAndMessage({ queueName = undefined, messageId = undefined }) { - const queue = this.queues[queueName]; - let message = undefined; - if (queue !== undefined && messageId !== undefined) { - message = queue.getMessage(messageId); - } - return { - queue: queue, - message: message - }; + getQueueAndMessage({ queueName = undefined, messageId = undefined }) { + const queue = this.queues[queueName]; + let message = undefined; + if (queue !== undefined && messageId !== undefined) { + message = queue.getMessage(messageId); } + return { + queue: queue, + message: message, + }; + } - listQueues({ prefix = '', marker = 0, maxresults = 5000 }) { - const filteredQueues = Object.keys(this.queues) - .filter((queueName) => { - return queueName.startsWith(prefix); - }) - .reduce((list, queueName) => { - list.push({ name: queueName, metaProps: this.queues[queueName].metaProps }); - return list; - }, []) - .sort((lhs, rhs) => { - return lhs.name > rhs.name; - }); - - const paginatedQueues = filteredQueues.slice(marker * maxresults, (marker + 1) * maxresults); - return { - queues: paginatedQueues, - nextMarker: (this.queues.length > (marker + 1) * maxresults) ? 
marker + 1 : undefined - } - } + listQueues({ prefix = "", marker = 0, maxresults = 5000 }) { + const filteredQueues = Object.keys(this.queues) + .filter((queueName) => { + return queueName.startsWith(prefix); + }) + .reduce((list, queueName) => { + list.push({ + name: queueName, + metaProps: this.queues[queueName].metaProps, + }); + return list; + }, []) + .sort((lhs, rhs) => { + return lhs.name > rhs.name; + }); - setQueueMetadata(request) { - const { queue } = this.getQueueAndMessage({ queueName: request.queueName }); - queue.metaProps = request.metaProps; - } + const paginatedQueues = filteredQueues.slice( + marker * maxresults, + (marker + 1) * maxresults + ); + return { + queues: paginatedQueues, + nextMarker: + this.queues.length > (marker + 1) * maxresults ? marker + 1 : undefined, + }; + } + + setQueueMetadata(request) { + const { queue } = this.getQueueAndMessage({ queueName: request.queueName }); + queue.metaProps = request.metaProps; + } } -module.exports = new QueueManager(); \ No newline at end of file +module.exports = new QueueManager(); diff --git a/lib/core/table/TableStorageManager.js b/lib/core/table/TableStorageManager.js index d36abe7c4..7cf09810d 100644 --- a/lib/core/table/TableStorageManager.js +++ b/lib/core/table/TableStorageManager.js @@ -1,227 +1,305 @@ -'use strict'; - -const Loki = require('lokijs'), - BbPromise = require('bluebird'), - fs = require("fs-extra"), - fsn = BbPromise.promisifyAll(require("fs")), - AzuriteTableResponse = require('./../../model/table/AzuriteTableResponse'), - TableProxy = require('./../../model/table/TableProxy'), - EntityProxy = require('./../../model/table/EntityProxy'), - EntityGenerator = require('./../../model/table/EntityGenerator'), - Tables = require('./../Constants').TableStorageTables, - env = require('./../../core/env'); +/** @format */ + +"use strict"; + +const Loki = require("lokijs"), + BbPromise = require("bluebird"), + fs = require("fs-extra"), + fsn = BbPromise.promisifyAll(require("fs")), 
+ AzuriteTableResponse = require("./../../model/table/AzuriteTableResponse"), + TableProxy = require("./../../model/table/TableProxy"), + EntityProxy = require("./../../model/table/EntityProxy"), + EntityGenerator = require("./../../model/table/EntityGenerator"), + Tables = require("./../Constants").TableStorageTables, + env = require("./../../core/env"), + AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class TableStorageManager { - constructor() { - } + constructor() {} - init() { - this.db = BbPromise.promisifyAll(new Loki(env.azuriteDBPathTable, { autosave: true, autosaveInterval: 5000 })); - return fsn.statAsync(env.azuriteDBPathTable) - .then((stat) => { - return this.db.loadDatabaseAsync({}); - }) - .then((data) => { - if (!this.db.getCollection(Tables.Tables)) { - this.db.addCollection(Tables.Tables); - } - return this.db.saveDatabaseAsync(); - }) - .catch((e) => { - if (e.code === 'ENOENT') { - // No DB has been persisted / initialized yet. - this.db.addCollection(Tables.Tables); - return this.db.saveDatabaseAsync(); - } - // This should never happen! - console.error(`Failed to initialize database at "${env.azuriteDBPathTable}"`); - throw e; - }); - } + init() { + this.db = BbPromise.promisifyAll( + new Loki(env.azuriteDBPathTable, { + autosave: true, + autosaveInterval: 5000, + }) + ); + return fsn + .statAsync(env.azuriteDBPathTable) + .then((stat) => { + return this.db.loadDatabaseAsync({}); + }) + .then((data) => { + if (!this.db.getCollection(Tables.Tables)) { + this.db.addCollection(Tables.Tables); + } + return this.db.saveDatabaseAsync(); + }) + .catch((e) => { + if (e.code === "ENOENT") { + // No DB has been persisted / initialized yet. + this.db.addCollection(Tables.Tables); + return this.db.saveDatabaseAsync(); + } + // This should never happen! 
+ console.error( + `Failed to initialize database at "${env.azuriteDBPathTable}"` + ); + throw e; + }); + } - createTable(request) { - this.db.addCollection(request.tableName); - const coll = this.db.getCollection(Tables.Tables); - const tableEntity = EntityGenerator.generateTable(request.tableName); - const proxy = new TableProxy(coll.insert(tableEntity)); - return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); - } + createTable(request) { + this.db.addCollection(request.tableName); + const coll = this.db.getCollection(Tables.Tables); + const tableEntity = EntityGenerator.generateTable(request.tableName); + const proxy = new TableProxy(coll.insert(tableEntity)); + return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + } + + insertEntity(request) { + const proxy = this._createOrUpdateEntity( + request.partitionKey, + request.rowKey, + request.tableName, + request.payload + ); + return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + } - insertEntity(request) { - const proxy = this._createOrUpdateEntity(request.partitionKey, request.rowKey, request.tableName, request.payload); - return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + deleteTable(request) { + const coll = this.db.getCollection(Tables.Tables); + coll + .chain() + .find({ name: { $eq: request.tableName } }) + .remove(); + this.db.removeCollection(request.tableName); + return BbPromise.resolve(new AzuriteTableResponse({})); + } + + deleteEntity(request) { + this._deleteEntity(request.tableName, request.partitionKey, request.rowKey); + return BbPromise.resolve(new AzuriteTableResponse({})); + } + + queryTable(request) { + const coll = this.db.getCollection(Tables.Tables); + const payload = []; + if (request.tableName !== undefined) { + const result = coll + .chain() + .find({ name: request.tableName }) + .limit(request.top) + .data(); + // there must be a table since we are validating its existence in validation pipeline + payload.push(new 
TableProxy(result[0])); + return BbPromise.resolve(new AzuriteTableResponse({ payload: payload })); } - deleteTable(request) { - const coll = this.db.getCollection(Tables.Tables); - coll.chain().find({ name: { '$eq': request.tableName } }).remove(); - this.db.removeCollection(request.tableName); - return BbPromise.resolve(new AzuriteTableResponse({})); + let result; + if (request.filter !== undefined) { + result = coll + .chain() + .where((item) => { + return eval(request.filter); + }) + .limit(request.top) + .data(); + } else { + // Returning all tables + result = coll + .chain() + .find({}) + .limit(request.top) + .data(); } + for (const table of result) { + payload.push(new TableProxy(table)); + } + return BbPromise.resolve(new AzuriteTableResponse({ payload: payload })); + } - deleteEntity(request) { - this._deleteEntity(request.tableName, request.partitionKey, request.rowKey); - return BbPromise.resolve(new AzuriteTableResponse({})); + queryEntities(request) { + const coll = this.db.getCollection(request.tableName); + const findExpr = {}; + if (request.partitionKey) { + findExpr["partitionKey"] = request.partitionKey; + } + if (request.rowKey) { + findExpr["rowKey"] = request.rowKey; + } + let find; + if (request.filter) { + find = coll + .chain() + .find(findExpr) + .where((item) => { + return eval(request.filter); + }) + .data(); + } else { + find = coll + .chain() + .find(findExpr) + .data(); } - queryTable(request) { - const coll = this.db.getCollection(Tables.Tables); - const payload = []; - if (request.tableName !== undefined) { - const result = coll.chain() - .find({ name: request.tableName }) - .limit(request.top) - .data(); - // there must be a table since we are validating its existence in validation pipeline - payload.push(new TableProxy(result[0])); - return BbPromise.resolve(new AzuriteTableResponse({ payload: payload })); - } + if (find.length == 0) { + throw new AError(ErrorCodes.EntityNotFound); + } - let result; - if (request.filter !== 
undefined) { - result = coll.chain() - .where((item) => { - return eval(request.filter) - }) - .limit(request.top) - .data(); - } else { // Returning all tables - result = coll.chain() - .find({}) - .limit(request.top) - .data(); - } - for (const table of result) { - payload.push(new TableProxy(table)); - } - return BbPromise.resolve(new AzuriteTableResponse({ payload: payload })); + let payload = []; + for (const item of find) { + payload.push(new EntityProxy(item)); } - queryEntities(request) { - const coll = this.db.getCollection(request.tableName); - const findExpr = {}; - if (request.partitionKey) { - findExpr['partitionKey'] = request.partitionKey; - } - if (request.rowKey) { - findExpr['rowKey'] = request.rowKey; - } + return BbPromise.resolve(new AzuriteTableResponse({ payload: payload })); + } - const chain = coll.chain().find(findExpr); - if (request.filter) { - chain.where((item) => { - return eval(request.filter) - }); - } - const result = chain.limit(request.top).data(); - const payload = []; - for (const item of result) { - payload.push(new EntityProxy(item)); - } + updateEntity(request) { + const proxy = this._createOrUpdateEntity( + request.partitionKey, + request.rowKey, + request.tableName, + request.payload + ); + return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + } - return BbPromise.resolve(new AzuriteTableResponse({ payload: payload })); - } + insertOrReplaceEntity(request) { + const proxy = this._createOrUpdateEntity( + request.partitionKey, + request.rowKey, + request.tableName, + request.payload + ); + return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + } - updateEntity(request) { - const proxy = this._createOrUpdateEntity(request.partitionKey, request.rowKey, request.tableName, request.payload); - return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); - } + mergeEntity(request) { + const proxy = this._insertOrMergeEntity( + request.partitionKey, + request.rowKey, + request.tableName, 
+ request.payload + ); + return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + } - insertOrReplaceEntity(request) { - const proxy = this._createOrUpdateEntity(request.partitionKey, request.rowKey, request.tableName, request.payload); - return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); - } + insertOrMergeEntity(request) { + const proxy = this._insertOrMergeEntity( + request.partitionKey, + request.rowKey, + request.tableName, + request.payload + ); + return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); + } - mergeEntity(request) { - const proxy = this._insertOrMergeEntity(request.partitionKey, request.rowKey, request.tableName, request.payload); - return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); - } + _getTable(name) { + const coll = this.db.getCollection(Tables.Tables); + const result = coll + .chain() + .find({ name: name }) + .data(); + return result.length === 0 ? undefined : new TableProxy(result[0]); + } - insertOrMergeEntity(request) { - const proxy = this._insertOrMergeEntity(request.partitionKey, request.rowKey, request.tableName, request.payload); - return BbPromise.resolve(new AzuriteTableResponse({ proxy: proxy })); - } + _deleteEntity(tableName, partitionKey, rowKey) { + const coll = this.db.getCollection(tableName), + result = coll + .chain() + .find({ + $and: [ + { + partitionKey: { $eq: partitionKey }, + }, + { + rowKey: { $eq: rowKey }, + }, + ], + }) + .remove(); + } - _getTable(name) { - const coll = this.db.getCollection(Tables.Tables); - const result = coll.chain() - .find({ name: name }) - .data(); - return (result.length === 0) ? 
undefined : new TableProxy(result[0]); + _getEntity(tableName, partitionKey, rowKey) { + const coll = this.db.getCollection(tableName); + if (coll === null) { + return undefined; } + const result = coll + .chain() + .find({ + $and: [ + { + partitionKey: { $eq: partitionKey }, + }, + { + rowKey: { $eq: rowKey }, + }, + ], + }) + .data(); + return result.length === 0 ? undefined : new EntityProxy(result[0]); + } - _deleteEntity(tableName, partitionKey, rowKey) { - const coll = this.db.getCollection(tableName), - result = coll.chain() - .find({ - '$and': - [ - { - partitionKey: { '$eq': partitionKey } - }, - { - rowKey: { '$eq': rowKey } - } - ] - }) - .remove(); - } + _createOrUpdateEntity(partitionKey, rowKey, tableName, rawEntity) { + const coll = this.db.getCollection(tableName), + entity = EntityGenerator.generateEntity( + rawEntity, + tableName, + partitionKey, + rowKey + ), + res = coll.findOne({ partitionKey: partitionKey, rowKey: rowKey }); - _getEntity(tableName, partitionKey, rowKey) { - const coll = this.db.getCollection(tableName); - if (coll === null) { - return undefined; - } - const result = coll.chain() - .find({ - '$and': - [ - { - partitionKey: { '$eq': partitionKey } - }, - { - rowKey: { '$eq': rowKey } - } - ] - }) - .data(); - return (result.length === 0) ? 
undefined : new EntityProxy(result[0]); + if (res !== null) { + res.attribs = entity.attribs; + res.odata = entity.odata; + coll.update(res); + return new EntityProxy(res); } + const entityProxy = new EntityProxy(coll.insert(entity)); + return entityProxy; + } - _createOrUpdateEntity(partitionKey, rowKey, tableName, rawEntity) { - const coll = this.db.getCollection(tableName), - entity = EntityGenerator.generateEntity(rawEntity, tableName), - res = coll.findOne({ partitionKey: partitionKey, rowKey: rowKey }); + _insertOrMergeEntity(partitionKey, rowKey, tableName, rawEntity) { + const coll = this.db.getCollection(tableName), + entity = EntityGenerator.generateEntity( + rawEntity, + tableName, + partitionKey, + rowKey + ), + res = coll.findOne({ partitionKey: partitionKey, rowKey: rowKey }); - if (res !== null) { - res.attribs = entity.attribs; - res.odata = entity.odata; - coll.update(res); - return new EntityProxy(res); + if (res !== null) { + // A property cannot be removed with a Merge Entity operation (in contrast to an update operation). + for (const key of Object.keys(entity.attribs)) { + if (entity.attribs[key]) { + res.attribs[key] = entity.attribs[key]; } - const entityProxy = new EntityProxy(coll.insert(entity)); - return entityProxy; + } + res.odata = entity.odata; + coll.update(res); + return new EntityProxy(res); } + return this._createOrUpdateEntity( + partitionKey, + rowKey, + tableName, + rawEntity + ); + } - _insertOrMergeEntity(partitionKey, rowKey, tableName, rawEntity) { - const coll = this.db.getCollection(tableName), - entity = EntityGenerator.generateEntity(rawEntity, tableName), - res = coll.findOne({ partitionKey: partitionKey, rowKey: rowKey }); - - if (res !== null) { - // A property cannot be removed with a Merge Entity operation (in contrast to an update operation). 
- for (const key of Object.keys(entity.attribs)) { - if (entity.attribs[key]) { - res.attribs[key] = entity.attribs[key]; - } - } - res.odata = entity.odata; - coll.update(res); - return new EntityProxy(res); - } - return this._createOrUpdateEntity(partitionKey, rowKey, tableName, rawEntity); - } -} + flush() { + return this.db.saveDatabaseAsync(); + } + close() { + return this.db.close(); + } +} -module.exports = new TableStorageManager(); \ No newline at end of file +module.exports = new TableStorageManager(); diff --git a/lib/core/utils.js b/lib/core/utils.js index 09202c1d3..f9b34e159 100644 --- a/lib/core/utils.js +++ b/lib/core/utils.js @@ -1,11 +1,13 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'); +"use strict"; + +const crypto = require("crypto"); exports.computeEtag = (templateString) => { - return crypto - .createHash('sha1') - .update(templateString, 'utf8') - .digest('base64') - .replace(/=+$/, ''); -} \ No newline at end of file + return crypto + .createHash("sha1") + .update(templateString, "utf8") + .digest("base64") + .replace(/=+$/, ""); +}; diff --git a/lib/middleware/blob/actions.js b/lib/middleware/blob/actions.js index 2e0428584..aa86e7f6f 100644 --- a/lib/middleware/blob/actions.js +++ b/lib/middleware/blob/actions.js @@ -1,171 +1,172 @@ -'use strict'; - -const BbPromise = require('bluebird'), - Operations = require('./../../core/Constants').Operations, - // Actions - createContainer = require('./../../actions/blob/CreateContainer'), - deleteBlob = require('./../../actions/blob/DeleteBlob'), - deleteContainer = require('./../../actions/blob/DeleteContainer'), - getBlob = require('./../../actions/blob/GetBlob'), - getBlobMetadata = require('./../../actions/blob/GetBlobMetadata'), - getBlobProperties = require('./../../actions/blob/GetBlobProperties'), - getBlockList = require('./../../actions/blob/GetBlockList'), - getContainerAcl = require('./../../actions/blob/GetContainerAcl'), - getContainerMetadata = 
require('./../../actions/blob/GetContainerMetadata'), - getContainerProperties = require('./../../actions/blob/GetContainerProperties'), - getPageRanges = require('./../../actions/blob/GetPageRanges'), - leaseBlob = require('./../../actions/blob/LeaseBlob'), - leaseContainer = require('./../../actions/blob/LeaseContainer'), - listBlobs = require('./../../actions/blob/ListBlobs'), - listContainers = require('./../../actions/blob/ListContainers'), - putAppendBlock = require('./../../actions/blob/PutAppendBlock'), - putBlob = require('./../../actions/blob/PutBlob'), - putBlock = require('./../../actions/blob/PutBlock'), - putBlockList = require('./../../actions/blob/PutBlockList'), - putPage = require('./../../actions/blob/PutPage'), - setBlobMetadata = require('./../../actions/blob/SetBlobMetadata'), - setBlobProperties = require('./../../actions/blob/SetBlobProperties'), - setBlobServiceProperties = require('./../../actions/blob/SetBlobServiceProperties'), - getBlobServiceProperties = require('./../../actions/blob/GetBlobServiceProperties'), - preflightBlobRequest = require('./../../actions/blob/PreflightBlobRequest'), - setContainerAcl = require('./../../actions/blob/SetContainerAcl'), - setContainerMetadata = require('./../../actions/blob/SetContainerMetadata'), - snapshotBlob = require('./../../actions/blob/SnapshotBlob'), - copyBlob = require('./../../actions/blob/CopyBlob'), - abortCopyBlob = require('./../../actions/blob/AbortCopyBlob'); - +/** @format */ + +"use strict"; + +const BbPromise = require("bluebird"), + Operations = require("./../../core/Constants").Operations, + // Actions + createContainer = require("./../../actions/blob/CreateContainer"), + deleteBlob = require("./../../actions/blob/DeleteBlob"), + deleteContainer = require("./../../actions/blob/DeleteContainer"), + getBlob = require("./../../actions/blob/GetBlob"), + getBlobMetadata = require("./../../actions/blob/GetBlobMetadata"), + getBlobProperties = 
require("./../../actions/blob/GetBlobProperties"), + getBlockList = require("./../../actions/blob/GetBlockList"), + getContainerAcl = require("./../../actions/blob/GetContainerAcl"), + getContainerMetadata = require("./../../actions/blob/GetContainerMetadata"), + getContainerProperties = require("./../../actions/blob/GetContainerProperties"), + getPageRanges = require("./../../actions/blob/GetPageRanges"), + leaseBlob = require("./../../actions/blob/LeaseBlob"), + leaseContainer = require("./../../actions/blob/LeaseContainer"), + listBlobs = require("./../../actions/blob/ListBlobs"), + listContainers = require("./../../actions/blob/ListContainers"), + putAppendBlock = require("./../../actions/blob/PutAppendBlock"), + putBlob = require("./../../actions/blob/PutBlob"), + putBlock = require("./../../actions/blob/PutBlock"), + putBlockList = require("./../../actions/blob/PutBlockList"), + putPage = require("./../../actions/blob/PutPage"), + setBlobMetadata = require("./../../actions/blob/SetBlobMetadata"), + setBlobProperties = require("./../../actions/blob/SetBlobProperties"), + setBlobServiceProperties = require("./../../actions/blob/SetBlobServiceProperties"), + getBlobServiceProperties = require("./../../actions/blob/GetBlobServiceProperties"), + preflightBlobRequest = require("./../../actions/blob/PreflightBlobRequest"), + setContainerAcl = require("./../../actions/blob/SetContainerAcl"), + setContainerMetadata = require("./../../actions/blob/SetContainerMetadata"), + snapshotBlob = require("./../../actions/blob/SnapshotBlob"), + copyBlob = require("./../../actions/blob/CopyBlob"), + abortCopyBlob = require("./../../actions/blob/AbortCopyBlob"); module.exports = (req, res) => { - BbPromise.try(() => { - actions[req.azuriteOperation](req.azuriteRequest, res); - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; - }); -} + BbPromise.try(() => { + actions[req.azuriteOperation](req.azuriteRequest, res); + }).catch((e) => 
{ + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; const actions = {}; actions[undefined] = (request, res) => { - res.status(501).send('Not Implemented yet.'); -} + res.status(501).send("Not Implemented yet."); +}; actions[Operations.Account.PREFLIGHT_BLOB_REQUEST] = (request, res) => { - preflightBlobRequest.process(request, res); -} + preflightBlobRequest.process(request, res); +}; actions[Operations.Account.SET_BLOB_SERVICE_PROPERTIES] = (request, res) => { - setBlobServiceProperties.process(request, res); -} + setBlobServiceProperties.process(request, res); +}; actions[Operations.Account.GET_BLOB_SERVICE_PROPERTIES] = (request, res) => { - getBlobServiceProperties.process(request, res); -} + getBlobServiceProperties.process(request, res); +}; actions[Operations.Account.LIST_CONTAINERS] = (request, res) => { - listContainers.process(request, res); -} + listContainers.process(request, res); +}; actions[Operations.Container.CREATE_CONTAINER] = (request, res) => { - createContainer.process(request, res); -} + createContainer.process(request, res); +}; actions[Operations.Container.DELETE_CONTAINER] = (request, res) => { - deleteContainer.process(request, res); -} + deleteContainer.process(request, res); +}; actions[Operations.Blob.PUT_BLOB] = (request, res) => { - putBlob.process(request, res); -} + putBlob.process(request, res); +}; actions[Operations.Blob.APPEND_BLOCK] = (request, res) => { - putAppendBlock.process(request, res); -} + putAppendBlock.process(request, res); +}; actions[Operations.Blob.DELETE_BLOB] = (request, res) => { - deleteBlob.process(request, res); -} + deleteBlob.process(request, res); +}; actions[Operations.Blob.GET_BLOB] = (request, res) => { - getBlob.process(request, res); -} + getBlob.process(request, res); +}; actions[Operations.Container.LIST_BLOBS] = (request, res) => { - listBlobs.process(request, res); -} + listBlobs.process(request, res); +}; actions[Operations.Blob.PUT_BLOCK] = (request, 
res) => { - putBlock.process(request, res); -} + putBlock.process(request, res); +}; actions[Operations.Blob.PUT_BLOCK_LIST] = (request, res) => { - putBlockList.process(request, res); -} + putBlockList.process(request, res); +}; actions[Operations.Blob.GET_BLOCK_LIST] = (request, res) => { - getBlockList.process(request, res); -} + getBlockList.process(request, res); +}; actions[Operations.Blob.SET_BLOB_METADATA] = (request, res) => { - setBlobMetadata.process(request, res); -} + setBlobMetadata.process(request, res); +}; actions[Operations.Blob.GET_BLOB_METADATA] = (request, res) => { - getBlobMetadata.process(request, res); -} + getBlobMetadata.process(request, res); +}; actions[Operations.Blob.GET_BLOB_PROPERTIES] = (request, res) => { - getBlobProperties.process(request, res); -} + getBlobProperties.process(request, res); +}; actions[Operations.Blob.SET_BLOB_PROPERTIES] = (request, res) => { - setBlobProperties.process(request, res); -} + setBlobProperties.process(request, res); +}; actions[Operations.Container.SET_CONTAINER_METADATA] = (request, res) => { - setContainerMetadata.process(request, res); -} + setContainerMetadata.process(request, res); +}; actions[Operations.Container.GET_CONTAINER_METADATA] = (request, res) => { - getContainerMetadata.process(request, res); -} + getContainerMetadata.process(request, res); +}; actions[Operations.Container.GET_CONTAINER_PROPERTIES] = (request, res) => { - getContainerProperties.process(request, res); -} + getContainerProperties.process(request, res); +}; actions[Operations.Blob.PUT_PAGE] = (request, res) => { - putPage.process(request, res); -} + putPage.process(request, res); +}; actions[Operations.Blob.GET_PAGE_RANGES] = (request, res) => { - getPageRanges.process(request, res); -} + getPageRanges.process(request, res); +}; actions[Operations.Container.SET_CONTAINER_ACL] = (request, res) => { - setContainerAcl.process(request, res); -} + setContainerAcl.process(request, res); +}; 
actions[Operations.Container.GET_CONTAINER_ACL] = (request, res) => { - getContainerAcl.process(request, res); -} + getContainerAcl.process(request, res); +}; actions[Operations.Blob.SNAPSHOT_BLOB] = (request, res) => { - snapshotBlob.process(request, res); -} + snapshotBlob.process(request, res); +}; actions[Operations.Container.LEASE_CONTAINER] = (request, res) => { - leaseContainer.process(request, res); -} + leaseContainer.process(request, res); +}; actions[Operations.Blob.LEASE_BLOB] = (request, res) => { - leaseBlob.process(request, res); -} + leaseBlob.process(request, res); +}; actions[Operations.Blob.COPY_BLOB] = (request, res) => { - copyBlob.process(request, res); -} + copyBlob.process(request, res); +}; actions[Operations.Blob.ABORT_COPY_BLOB] = (request, res) => { - abortCopyBlob.process(request, res); -} \ No newline at end of file + abortCopyBlob.process(request, res); +}; diff --git a/lib/middleware/blob/authentication.js b/lib/middleware/blob/authentication.js index 28f897d10..2c3c97e40 100644 --- a/lib/middleware/blob/authentication.js +++ b/lib/middleware/blob/authentication.js @@ -1,116 +1,153 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - crypto = require('crypto'), - AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - env = require('./../../core/env'), - Keys = require('./../../core/Constants').Keys, - Operations = require('./../../core/Constants').Operations; +"use strict"; -module.exports = (req, res, next) => { - BbPromise.try(() => { - const request = req.azuriteRequest; - if (env.accountAuth) { - if (req.headers.authorization === undefined) throw new AError(ErrorCodes.AuthenticationFailed); - const match = /SharedKey devstoreaccount1:(.*)/.exec(req.headers.authorization); - if (match === null) throw new AError(ErrorCodes.AuthenticationFailed); - const sig = _generateAccountSignature(req); - if (sig.toString() != match[1].toString()){ - console.log("ERROR : 
Signature did not match!"); - throw new AError(ErrorCodes.AuthenticationFailed); - } +const BbPromise = require("bluebird"), + crypto = require("crypto"), + AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + env = require("./../../core/env"), + Keys = require("./../../core/Constants").Keys, + Operations = require("./../../core/Constants").Operations; - } +module.exports = (req, res, next) => { + BbPromise.try(() => { + const request = req.azuriteRequest; + if (env.accountAuth) { + if (req.headers.authorization === undefined) + throw new AError(ErrorCodes.AuthenticationFailed); + const match = /SharedKey devstoreaccount1:(.*)/.exec( + req.headers.authorization + ); + if (match === null) throw new AError(ErrorCodes.AuthenticationFailed); + const sig = _generateAccountSignature(req); + if (sig.toString() != match[1].toString()) { + console.log("ERROR : Signature did not match!"); + throw new AError(ErrorCodes.AuthenticationFailed); + } + } - if (request.query.sig === undefined) { - next(); - return; - } + if (request.query.sig === undefined) { + next(); + return; + } - const accessPolicy = { - permissions: request.query.sp, - start: request.query.st, - expiry: request.query.se, - canonicalizedResource: request.query.sr === 'c' - ? `/blob/devstoreaccount1/${request.containerName}` - : `/blob/devstoreaccount1/${request.containerName}/${request.blobName}`, - id: request.query.si, - ipAddressOrRange: request.query.sip, - protocols: request.query.spr, - version: request.query.sv, - rscc: request.query.rscc, - rscd: request.query.rscd, - rsce: request.query.rsce, - rscl: request.query.rscl, - rsct: request.query.rsct - }; + const accessPolicy = { + permissions: request.query.sp, + start: request.query.st, + expiry: request.query.se, + canonicalizedResource: + request.query.sr === "c" + ? 
`/blob/devstoreaccount1/${request.containerName}` + : `/blob/devstoreaccount1/${request.containerName}/${ + request.blobName + }`, + id: request.query.si, + ipAddressOrRange: request.query.sip, + protocols: request.query.spr, + version: request.query.sv, + rscc: request.query.rscc, + rscd: request.query.rscd, + rsce: request.query.rsce, + rscl: request.query.rscl, + rsct: request.query.rsct, + }; - const sig = _generateSignature(accessPolicy); - request.auth = {}; - request.auth.sasValid = sig === request.query.sig; - request.auth.accessPolicy = accessPolicy; - request.auth.resource = request.query.sr; - next(); - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; - }); -} + const sig = _generateSignature(accessPolicy); + request.auth = {}; + request.auth.sasValid = sig === request.query.sig; + request.auth.accessPolicy = accessPolicy; + request.auth.resource = request.query.sr; + next(); + }).catch((e) => { + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; function _generateAccountSignature(req) { - let str = `${req.method.toUpperCase()}\n` - str += req.headers['content-encoding'] === undefined ? `\n` : `${req.headers['content-encoding']}\n` - str += req.headers['content-language'] === undefined ? `\n` : `${req.headers['content-language']}\n` - str += req.headers['content-length'] === undefined || req.headers['content-length'] === '0' ? `\n` : `${req.headers['content-length']}\n` - str += req.headers['content-md5'] === undefined ? `\n` : `${req.headers['content-md5']}\n` - str += req.headers['content-type'] === undefined ? `\n` : `${req.headers['content-type']}\n` - str += req.headers['date'] === undefined ? `\n` : `${req.headers['date']}\n` - str += req.headers['if-modified-since'] === undefined ? `\n` : `${req.headers['if-modified-since']}\n` - str += req.headers['if-match'] === undefined ? 
`\n` : `${req.headers['if-match']}\n` - str += req.headers['if-none-match'] === undefined ? `\n` : `${req.headers['if-none-match']}\n` - str += req.headers['if-unmodified-since'] === undefined ? `\n` : `${req.headers['if-unmodified-since']}\n` - str += req.headers['range'] === undefined ? `\n` : `${req.headers['range']}\n` + let str = `${req.method.toUpperCase()}\n`; + str += + req.headers["content-encoding"] === undefined + ? `\n` + : `${req.headers["content-encoding"]}\n`; + str += + req.headers["content-language"] === undefined + ? `\n` + : `${req.headers["content-language"]}\n`; + str += + req.headers["content-length"] === undefined || + req.headers["content-length"] === "0" + ? `\n` + : `${req.headers["content-length"]}\n`; + str += + req.headers["content-md5"] === undefined + ? `\n` + : `${req.headers["content-md5"]}\n`; + str += + req.headers["content-type"] === undefined + ? `\n` + : `${req.headers["content-type"]}\n`; + str += req.headers["date"] === undefined ? `\n` : `${req.headers["date"]}\n`; + str += + req.headers["if-modified-since"] === undefined + ? `\n` + : `${req.headers["if-modified-since"]}\n`; + str += + req.headers["if-match"] === undefined + ? `\n` + : `${req.headers["if-match"]}\n`; + str += + req.headers["if-none-match"] === undefined + ? `\n` + : `${req.headers["if-none-match"]}\n`; + str += + req.headers["if-unmodified-since"] === undefined + ? `\n` + : `${req.headers["if-unmodified-since"]}\n`; + str += + req.headers["range"] === undefined ? 
`\n` : `${req.headers["range"]}\n`; - // copy all x-ms-XXX headers - var xms = {} - for (const key in req.headers) { - if (key.startsWith('x-ms-')) xms[key] = req.headers[key] - } - Object.keys(xms) - .sort() - .forEach(function(v, i) { - str += `${v}:${xms[v]}\n` - }) - str += `/devstoreaccount1${req._parsedUrl['pathname']}\n` + // copy all x-ms-XXX headers + var xms = {}; + for (const key in req.headers) { + if (key.startsWith("x-ms-")) xms[key] = req.headers[key]; + } + Object.keys(xms) + .sort() + .forEach(function(v, i) { + str += `${v}:${xms[v]}\n`; + }); + str += `/devstoreaccount1${req._parsedUrl["pathname"]}\n`; - Object.keys(req.query) - .sort() - .forEach(function(v, i) { - var qlist = req.query[v] - if (Array.isArray(req.query[v])) - qlist = req.query[v].sort() - str += `${v}:${qlist}\n` - }) + Object.keys(req.query) + .sort() + .forEach(function(v, i) { + var qlist = req.query[v]; + if (Array.isArray(req.query[v])) qlist = req.query[v].sort(); + str += `${v}:${qlist}\n`; + }); - str = str.slice(0, str.length - 1); - str = decodeURIComponent(str); - //console.log(str) - const sig = crypto.createHmac('sha256', Keys.DecodedAccessKey) - .update(str, 'utf8') - .digest('base64'); - return sig + str = str.slice(0, str.length - 1); + str = decodeURIComponent(str); + //console.log(str) + const sig = crypto + .createHmac("sha256", Keys.DecodedAccessKey) + .update(str, "utf8") + .digest("base64"); + return sig; } function _generateSignature(ap) { - let str = ''; - for (const key in ap) { - str += ap[key] === undefined ? `\n` : `${ap[key]}\n`; - } - str = str.slice(0, str.length - 1); - str = decodeURIComponent(str); - const sig = crypto.createHmac('sha256', Keys.DecodedAccessKey) - .update(str, 'utf8') - .digest('base64'); - return sig; -} \ No newline at end of file + let str = ""; + for (const key in ap) { + str += ap[key] === undefined ? 
`\n` : `${ap[key]}\n`; + } + str = str.slice(0, str.length - 1); + str = decodeURIComponent(str); + const sig = crypto + .createHmac("sha256", Keys.DecodedAccessKey) + .update(str, "utf8") + .digest("base64"); + return sig; +} diff --git a/lib/middleware/blob/cors.js b/lib/middleware/blob/cors.js index 105a1aa02..ef0bfb666 100644 --- a/lib/middleware/blob/cors.js +++ b/lib/middleware/blob/cors.js @@ -1,73 +1,99 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - N = require('./../../core/HttpHeaderNames'), - Operations = require('./../../core/Constants').Operations, - sm = require('./../../core/blob/StorageManager'); +"use strict"; + +const BbPromise = require("bluebird"), + AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + N = require("./../../core/HttpHeaderNames"), + Operations = require("./../../core/Constants").Operations, + sm = require("./../../core/blob/StorageManager"); + +// Returns the dict of allowed headers for a given request +function getAllowedHeaders(req) { + const request = req.azuriteRequest; + if (req.azuriteOperation === Operations.Account.PREFLIGHT_BLOB_REQUEST) { + if (request.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS] === undefined) { + return {}; + } else { + return request.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS] + .toLowerCase() + .split(",") + .reduce((acc, e) => { + const key = Object.keys(e)[0]; + acc[key] = e[key]; + return acc; + }, {}); + } + } else { + return req.headers; + } +} // Performs CORS rule-validation iff CORS is enabled and request header 'origin' is set. 
module.exports = (req, res, next) => { - BbPromise.try(() => { - const request = req.azuriteRequest; - sm.getBlobServiceProperties(request) - .then((response) => { - if (response.payload.StorageServiceProperties && request.httpProps[N.ORIGIN]) { - const allowedMethods = req.azuriteOperation === Operations.Account.PREFLIGHT_BLOB_REQUEST - ? request.httpProps[N.ACCESS_CONTROL_REQUEST_METHOD].toLowerCase() - : req.method.toLowerCase(); + BbPromise.try(() => { + const request = req.azuriteRequest; + sm.getBlobServiceProperties(request).then((response) => { + if ( + response.payload.StorageServiceProperties && + request.httpProps[N.ORIGIN] + ) { + const allowedMethods = + req.azuriteOperation === Operations.Account.PREFLIGHT_BLOB_REQUEST + ? request.httpProps[N.ACCESS_CONTROL_REQUEST_METHOD].toLowerCase() + : req.method.toLowerCase(); - const allowedHeaders = req.azuriteOperation === Operations.Account.PREFLIGHT_BLOB_REQUEST - ? request.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS].toLowerCase().split(',') - .reduce((acc, e) => { - const key = Object.keys(e)[0]; - acc[key] = e[key]; - return acc; - }, {}) - : req.headers; + const allowedHeaders = getAllowedHeaders(req); - let valid = null; - for (const rule of response.payload.StorageServiceProperties.Cors.CorsRule) { - valid = false; - rule.AllowedOrigins = rule.AllowedOrigins.toLowerCase(); - rule.AllowedMethods = rule.AllowedMethods.toLowerCase(); - if (!rule.AllowedOrigins.includes(request.httpProps[N.ORIGIN]) && !rule.AllowedOrigins.includes('*')) { - continue; - } + let valid = null; + for (const rule of response.payload.StorageServiceProperties.Cors + .CorsRule) { + rule.AllowedOrigins = rule.AllowedOrigins.toLowerCase(); + rule.AllowedMethods = rule.AllowedMethods.toLowerCase(); + if ( + !rule.AllowedOrigins.includes(request.httpProps[N.ORIGIN]) && + !rule.AllowedOrigins.includes("*") + ) { + continue; + } - if (!rule.AllowedMethods.includes(allowedMethods)) { - continue; - } + if 
(!rule.AllowedMethods.includes(allowedMethods)) { + continue; + } - rule.AllowedHeaders.split(',') - .forEach((e) => { - Object.keys(allowedHeaders).forEach((requestHeader) => { - if (e.charAt(e.length - 1) === '*') { - valid = requestHeader.includes(e.slice(0, -1)); - } else { - valid = (e === requestHeader); - } - }); - }); - - if (valid) { - req.azuriteRequest.cors = {}; - req.azuriteRequest.cors.maxAgeInSeconds = rule.MaxAgeInSeconds; - req.azuriteRequest.cors.origin = request.httpProps[N.ORIGIN]; - req.azuriteRequest.cors.exposedHeaders = rule.ExposedHeaders; - break; - } - } - if (!valid && req.azuriteOperation === Operations.Account.PREFLIGHT_BLOB_REQUEST) { - throw new AError(ErrorCodes.CorsForbidden); - } - } - next(); + // Start at true to handle the case where allowedHeaders is an empty list + valid = true; + rule.AllowedHeaders.split(",").forEach((e) => { + Object.keys(allowedHeaders).forEach((requestHeader) => { + if (e.charAt(e.length - 1) === "*") { + valid = requestHeader.includes(e.slice(0, -1)); + } else { + valid = e === requestHeader; + } }); - return; - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; + }); + + if (valid) { + req.azuriteRequest.cors = {}; + req.azuriteRequest.cors.maxAgeInSeconds = rule.MaxAgeInSeconds; + req.azuriteRequest.cors.origin = request.httpProps[N.ORIGIN]; + req.azuriteRequest.cors.exposedHeaders = rule.ExposedHeaders; + break; + } + } + if ( + !valid && + req.azuriteOperation === Operations.Account.PREFLIGHT_BLOB_REQUEST + ) { + throw new AError(ErrorCodes.CorsForbidden); + } + } + next(); }); -} \ No newline at end of file + return; + }).catch((e) => { + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; diff --git a/lib/middleware/blob/validation.js b/lib/middleware/blob/validation.js index 9b7994e0c..7763a178b 100644 --- a/lib/middleware/blob/validation.js +++ b/lib/middleware/blob/validation.js @@ -1,337 +1,361 @@ -'use 
strict'; - -const BbPromise = require('bluebird'), - Operations = require('./../../core/Constants').Operations, - Usage = require('./../../core/Constants').Usage, - StorageEntityType = require('./../../core/Constants').StorageEntityType, - SasOperation = require('./../../core/Constants').ServiceSAS, - AzuriteContainerRequest = require('./../../model/blob/AzuriteContainerRequest'), - AzuriteBlobRequest = require('./../../model/blob/AzuriteBlobRequest'), - sm = require('./../../core/blob/StorageManager'), - // Validation modules - ValidationContext = require('./../../validation/blob/ValidationContext'), - AppendMaxBlobCommittedBlocksVal = require('./../../validation/blob/AppendMaxBlobCommittedBlocks'), - BlobCreationSizeVal = require('./../../validation/blob/BlobCreationSize'), - BlockPageSizeVal = require('./../../validation/blob/BlockPageSize'), - SupportedBlobTypeVal = require('./../../validation/blob/SupportedBlobType'), - CompatibleBlobTypeVal = require('./../../validation/blob/CompatibleBlobType'), - MD5Val = require('./../../validation/blob/MD5'), - ContentLengthExistsVal = require('./../../validation/blob/ContentLengthExists'), - ContainerExistsVal = require('./../../validation/blob/ContainerExists'), - BlobExistsVal = require('./../../validation/blob/BlobExists'), - BlobNameVal = require('./../../validation/blob/BlobName'), - BlobCommittedVal = require('./../../validation/blob/BlobCommitted'), - IsOfBlobTypeVal = require('./../../validation/blob/IsOfBlobType'), - RangeVal = require('./../../validation/blob/Range'), - PageAlignmentVal = require('./../../validation/blob/PageAlignment'), - NumOfSignedIdentifiersVal = require('./../../validation/NumOfSignedIdentifiers'), - PutBlobHeaderVal = require('./../../validation/blob/PutBlobHeaders'), - OriginHeaderVal = require('./../../validation/blob/OriginHeader'), - ConditionalRequestHeadersVal = require('./../../validation/blob/ConditionalRequestHeaders'), - AppendBlobConditionalRequestHeadersVal = 
require('./../../validation/blob/AppendBlobConditionalRequestHeaders'), - PageBlobHeaderSanityVal = require('./../../validation/blob/PageBlobHeaderSanity'), - AssociatedSnapshotDeletion = require('./../../validation/blob/AssociatedSnapshotsDeletion'), - LeaseActionsValidation = require('./../../validation/blob/LeaseActions'), - LeaseDurationValidation = require('./../../validation/blob/LeaseDuration'), - LeaseIdValidation = require('./../../validation/blob/LeaseId'), - ContainerLeaseUsageValidation = require('./../../validation/blob/ContainerLeaseUsage'), - ConflictingContainerVal = require('./../../validation/blob/ConflictingContainer'), - BlobLeaseUsageValidation = require('./../../validation/blob/BlobLeaseUsage'), - BlockListValidation = require('./../../validation/blob/BlockList'), - AbortCopyValidation = require('./../../validation/blob/AbortCopy'), - ServiceSignatureValidation = require('./../../validation/blob/ServiceSignature'), - ServicePropertiesValidation = require('./../../validation/blob/ServiceProperties'), - ContainerNameValidation = require('./../../validation/blob/ContainerName'), - CopyStatusValidation = require('./../../validation/blob/CopyStatus'); +/** @format */ + +"use strict"; + +const BbPromise = require("bluebird"), + Operations = require("./../../core/Constants").Operations, + Usage = require("./../../core/Constants").Usage, + StorageEntityType = require("./../../core/Constants").StorageEntityType, + SasOperation = require("./../../core/Constants").ServiceSAS, + AzuriteContainerRequest = require("./../../model/blob/AzuriteContainerRequest"), + AzuriteBlobRequest = require("./../../model/blob/AzuriteBlobRequest"), + sm = require("./../../core/blob/StorageManager"), + // Validation modules + ValidationContext = require("./../../validation/blob/ValidationContext"), + AppendMaxBlobCommittedBlocksVal = require("./../../validation/blob/AppendMaxBlobCommittedBlocks"), + BlobCreationSizeVal = require("./../../validation/blob/BlobCreationSize"), + 
BlockPageSizeVal = require("./../../validation/blob/BlockPageSize"), + SupportedBlobTypeVal = require("./../../validation/blob/SupportedBlobType"), + CompatibleBlobTypeVal = require("./../../validation/blob/CompatibleBlobType"), + MD5Val = require("./../../validation/blob/MD5"), + ContentLengthExistsVal = require("./../../validation/blob/ContentLengthExists"), + ContainerExistsVal = require("./../../validation/blob/ContainerExists"), + BlobExistsVal = require("./../../validation/blob/BlobExists"), + BlobNameVal = require("./../../validation/blob/BlobName"), + BlobCommittedVal = require("./../../validation/blob/BlobCommitted"), + IsOfBlobTypeVal = require("./../../validation/blob/IsOfBlobType"), + RangeVal = require("./../../validation/blob/Range"), + PageAlignmentVal = require("./../../validation/blob/PageAlignment"), + NumOfSignedIdentifiersVal = require("./../../validation/NumOfSignedIdentifiers"), + PutBlobHeaderVal = require("./../../validation/blob/PutBlobHeaders"), + OriginHeaderVal = require("./../../validation/blob/OriginHeader"), + ConditionalRequestHeadersVal = require("./../../validation/blob/ConditionalRequestHeaders"), + AppendBlobConditionalRequestHeadersVal = require("./../../validation/blob/AppendBlobConditionalRequestHeaders"), + PageBlobHeaderSanityVal = require("./../../validation/blob/PageBlobHeaderSanity"), + AssociatedSnapshotDeletion = require("./../../validation/blob/AssociatedSnapshotsDeletion"), + LeaseActionsValidation = require("./../../validation/blob/LeaseActions"), + LeaseDurationValidation = require("./../../validation/blob/LeaseDuration"), + LeaseIdValidation = require("./../../validation/blob/LeaseId"), + ContainerLeaseUsageValidation = require("./../../validation/blob/ContainerLeaseUsage"), + ConflictingContainerVal = require("./../../validation/blob/ConflictingContainer"), + BlobLeaseUsageValidation = require("./../../validation/blob/BlobLeaseUsage"), + BlockListValidation = require("./../../validation/blob/BlockList"), + 
AbortCopyValidation = require("./../../validation/blob/AbortCopy"), + ServiceSignatureValidation = require("./../../validation/blob/ServiceSignature"), + ServicePropertiesValidation = require("./../../validation/blob/ServiceProperties"), + ContainerNameValidation = require("./../../validation/blob/ContainerName"), + CopyStatusValidation = require("./../../validation/blob/CopyStatus"); module.exports = (req, res, next) => { - BbPromise.try(() => { - const request = req.azuriteRequest || {}; - // const { containerProxy } = sm._getCollectionAndContainer(request.containerName); - const o = sm._getCollectionAndContainer(request.containerName); - const containerProxy = o.containerProxy; - const blobId = request.parentId || request.id; - const { blobProxy } = sm._getCollectionAndBlob(request.containerName, blobId); - const validationContext = new ValidationContext({ - request: request, - containerProxy: containerProxy, - blobProxy: blobProxy - }) - validations[req.azuriteOperation](request, validationContext); - next(); - // Refactor me: Move this to bin/azurite (exception needs to carry res object), and handle entire exception handling there - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; + BbPromise.try(() => { + const request = req.azuriteRequest || {}; + // const { containerProxy } = sm._getCollectionAndContainer(request.containerName); + const o = sm._getCollectionAndContainer(request.containerName); + const containerProxy = o.containerProxy; + const blobId = request.parentId || request.id; + const { blobProxy } = sm._getCollectionAndBlob( + request.containerName, + blobId + ); + const validationContext = new ValidationContext({ + request: request, + containerProxy: containerProxy, + blobProxy: blobProxy, }); -} + validations[req.azuriteOperation](request, validationContext); + next(); + // Refactor me: Move this to bin/azurite (exception needs to carry res object), and handle entire exception handling there + 
}).catch((e) => { + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; const validations = {}; validations[undefined] = () => { - // NO VALIDATIONS (this is an unimplemented call) -} - -validations[Operations.Account.PREFLIGHT_BLOB_REQUEST] = (request, valContext) => { - valContext - .run(OriginHeaderVal); -} - -validations[Operations.Account.SET_BLOB_SERVICE_PROPERTIES] = (request, valContext) => { - valContext - .run(ServicePropertiesValidation); -} - -validations[Operations.Account.GET_BLOB_SERVICE_PROPERTIES] = (request, valContext) => { - // NO VALIDATIONS -} + // NO VALIDATIONS (this is an unimplemented call) +}; + +validations[Operations.Account.PREFLIGHT_BLOB_REQUEST] = ( + request, + valContext +) => { + valContext.run(OriginHeaderVal); +}; + +validations[Operations.Account.SET_BLOB_SERVICE_PROPERTIES] = ( + request, + valContext +) => { + valContext.run(ServicePropertiesValidation); +}; + +validations[Operations.Account.GET_BLOB_SERVICE_PROPERTIES] = ( + request, + valContext +) => { + // NO VALIDATIONS +}; validations[Operations.Account.LIST_CONTAINERS] = (request, valContext) => { - // NO VALIDATIONS -} + // NO VALIDATIONS +}; validations[Operations.Container.CREATE_CONTAINER] = (request, valContext) => { - valContext - .run(ConflictingContainerVal) - .run(ContainerNameValidation); -} + valContext.run(ConflictingContainerVal).run(ContainerNameValidation); +}; validations[Operations.Container.DELETE_CONTAINER] = (request, valContext) => { - valContext - .run(ContainerExistsVal) - .run(ContainerLeaseUsageValidation, { usage: Usage.Delete }); -} + valContext + .run(ContainerExistsVal) + .run(ContainerLeaseUsageValidation, { usage: Usage.Delete }); +}; validations[Operations.Blob.PUT_BLOB] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(MD5Val) - .run(ContainerExistsVal) - .run(BlobNameVal) - .run(CompatibleBlobTypeVal) - 
.run(SupportedBlobTypeVal) - .run(PutBlobHeaderVal) - .run(BlobCreationSizeVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(MD5Val) + .run(ContainerExistsVal) + .run(BlobNameVal) + .run(CompatibleBlobTypeVal) + .run(SupportedBlobTypeVal) + .run(PutBlobHeaderVal) + .run(BlobCreationSizeVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; validations[Operations.Blob.APPEND_BLOCK] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.ADD }) - .run(BlobExistsVal) - .run(ContentLengthExistsVal) - .run(BlockPageSizeVal) - .run(MD5Val) - .run(AppendMaxBlobCommittedBlocksVal) - .run(CompatibleBlobTypeVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(AppendBlobConditionalRequestHeadersVal); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.ADD }) + .run(BlobExistsVal) + .run(ContentLengthExistsVal) + .run(BlockPageSizeVal) + .run(MD5Val) + .run(AppendMaxBlobCommittedBlocksVal) + .run(CompatibleBlobTypeVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(AppendBlobConditionalRequestHeadersVal); +}; validations[Operations.Blob.DELETE_BLOB] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.DELETE }) - .run(BlobExistsVal) - .run(AssociatedSnapshotDeletion, { collection: sm.db.getCollection(request.containerName) }) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.DELETE }) + 
.run(BlobExistsVal) + .run(AssociatedSnapshotDeletion, { + collection: sm.db.getCollection(request.containerName), + }) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; validations[Operations.Blob.GET_BLOB] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) - .run(BlobExistsVal) - .run(BlobCommittedVal) - .run(RangeVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Read }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Read }) -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) + .run(BlobExistsVal) + .run(BlobCommittedVal) + .run(RangeVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Read }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Read }); +}; validations[Operations.Container.LIST_BLOBS] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.LIST }) - .run(ContainerExistsVal); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.LIST }) + .run(ContainerExistsVal); +}; validations[Operations.Blob.PUT_BLOCK] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - .run(ContentLengthExistsVal) - .run(BlockPageSizeVal) - .run(MD5Val) - .run(CompatibleBlobTypeVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }) -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(ContentLengthExistsVal) + .run(BlockPageSizeVal) + .run(MD5Val) + .run(CompatibleBlobTypeVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }); +}; validations[Operations.Blob.PUT_BLOCK_LIST] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - 
.run(CompatibleBlobTypeVal) - .run(BlockListValidation, { storageManager: sm }) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(CompatibleBlobTypeVal) + .run(BlockListValidation, { storageManager: sm }) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; validations[Operations.Blob.GET_BLOCK_LIST] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(IsOfBlobTypeVal, { entityType: StorageEntityType.BlockBlob }) - .run(BlobLeaseUsageValidation, { usage: Usage.Read }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(IsOfBlobTypeVal, { entityType: StorageEntityType.BlockBlob }) + .run(BlobLeaseUsageValidation, { usage: Usage.Read }); +}; validations[Operations.Blob.SET_BLOB_METADATA] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }); +}; validations[Operations.Blob.GET_BLOB_METADATA] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(BlobCommittedVal) - 
.run(BlobLeaseUsageValidation, { usage: Usage.Read }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Read }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(BlobCommittedVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Read }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Read }); +}; validations[Operations.Blob.GET_BLOB_PROPERTIES] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(BlobCommittedVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Read }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Read }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(BlobCommittedVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Read }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Read }); +}; validations[Operations.Blob.SET_BLOB_PROPERTIES] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }); -} - -validations[Operations.Container.SET_CONTAINER_METADATA] = (request, valContext) => { - valContext - .run(ContainerExistsVal) - .run(ContainerLeaseUsageValidation, { usage: Usage.Other }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); -} - -validations[Operations.Container.GET_CONTAINER_METADATA] = (request, valContext) => { - valContext - .run(ContainerExistsVal) - .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); -} - -validations[Operations.Container.GET_CONTAINER_PROPERTIES] = (request, valContext) => { - valContext - .run(ContainerExistsVal) - 
.run(ContainerLeaseUsageValidation, { usage: Usage.Other }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }); +}; + +validations[Operations.Container.SET_CONTAINER_METADATA] = ( + request, + valContext +) => { + valContext + .run(ContainerExistsVal) + .run(ContainerLeaseUsageValidation, { usage: Usage.Other }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; + +validations[Operations.Container.GET_CONTAINER_METADATA] = ( + request, + valContext +) => { + valContext + .run(ContainerExistsVal) + .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); +}; + +validations[Operations.Container.GET_CONTAINER_PROPERTIES] = ( + request, + valContext +) => { + valContext + .run(ContainerExistsVal) + .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); +}; validations[Operations.Blob.PUT_PAGE] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(ContentLengthExistsVal) - .run(IsOfBlobTypeVal, { entityType: StorageEntityType.PageBlob }) - .run(MD5Val) - .run(BlockPageSizeVal) - .run(PageAlignmentVal) - .run(PageBlobHeaderSanityVal) - .run(CompatibleBlobTypeVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Write }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(ContentLengthExistsVal) + .run(IsOfBlobTypeVal, { entityType: StorageEntityType.PageBlob }) + .run(MD5Val) + .run(BlockPageSizeVal) + .run(PageAlignmentVal) + .run(PageBlobHeaderSanityVal) + .run(CompatibleBlobTypeVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Write }) + 
.run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; validations[Operations.Blob.GET_PAGE_RANGES] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(PageAlignmentVal) - .run(BlobLeaseUsageValidation, { usage: Usage.Read }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(PageAlignmentVal) + .run(BlobLeaseUsageValidation, { usage: Usage.Read }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; validations[Operations.Container.SET_CONTAINER_ACL] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - .run(NumOfSignedIdentifiersVal) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(NumOfSignedIdentifiersVal) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); +}; validations[Operations.Container.GET_CONTAINER_ACL] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) - .run(ContainerExistsVal) - .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.READ }) + .run(ContainerExistsVal) + .run(ContainerLeaseUsageValidation, { usage: Usage.Other }); +}; validations[Operations.Blob.SNAPSHOT_BLOB] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.CREATE }) - .run(ContainerExistsVal) - 
.run(BlobExistsVal) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(BlobLeaseUsageValidation, { usage: Usage.Read }); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.CREATE }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(BlobLeaseUsageValidation, { usage: Usage.Read }); +}; validations[Operations.Container.LEASE_CONTAINER] = (request, valContext) => { - valContext - .run(ContainerExistsVal) - .run(LeaseActionsValidation) - .run(LeaseDurationValidation) - .run(LeaseIdValidation) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); -} + valContext + .run(ContainerExistsVal) + .run(LeaseActionsValidation) + .run(LeaseDurationValidation) + .run(LeaseIdValidation) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }); +}; validations[Operations.Blob.LEASE_BLOB] = (request, valContext) => { - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal) - .run(BlobExistsVal) - .run(LeaseDurationValidation) - .run(LeaseIdValidation) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(LeaseActionsValidation); -} + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal) + .run(BlobExistsVal) + .run(LeaseDurationValidation) + .run(LeaseIdValidation) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(LeaseActionsValidation); +}; validations[Operations.Blob.COPY_BLOB] = (request, valContext) => { - // Source Validation - const sourceBlobProxy = sm._getCopySourceProxy(request); - const ret = sm._getCollectionAndContainer((request.copySourceName()).sourceContainerName), - sourceContainerProxy = ret.containerProxy; - valContext - .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) - .run(ContainerExistsVal, { containerProxy: sourceContainerProxy }) - .run(BlobExistsVal, 
{ blobProxy: sourceBlobProxy }); - - // Target Validation - valContext - .run(ContainerExistsVal) - .run(CompatibleBlobTypeVal, { request: { entityType: sourceBlobProxy.original.entityType } }) - .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) - .run(CopyStatusValidation); -} + // Source Validation + const sourceBlobProxy = sm._getCopySourceProxy(request); + const ret = sm._getCollectionAndContainer( + request.copySourceName().sourceContainerName + ), + sourceContainerProxy = ret.containerProxy; + valContext + .run(ServiceSignatureValidation, { sasOperation: SasOperation.Blob.WRITE }) + .run(ContainerExistsVal, { containerProxy: sourceContainerProxy }) + .run(BlobExistsVal, { blobProxy: sourceBlobProxy }); + + // Target Validation + valContext + .run(ContainerExistsVal) + .run(CompatibleBlobTypeVal, { + request: { entityType: sourceBlobProxy.original.entityType }, + }) + .run(ConditionalRequestHeadersVal, { usage: Usage.Write }) + .run(CopyStatusValidation); +}; validations[Operations.Blob.ABORT_COPY_BLOB] = (request, valContext) => { - valContext - .run(AbortCopyValidation); -} \ No newline at end of file + valContext.run(AbortCopyValidation); +}; diff --git a/lib/middleware/queue/actions.js b/lib/middleware/queue/actions.js index b3b6e1bd4..d08c1ab67 100644 --- a/lib/middleware/queue/actions.js +++ b/lib/middleware/queue/actions.js @@ -1,85 +1,87 @@ -'use strict'; - -const BbPromise = require('bluebird'), - Operations = require('./../../core/Constants').Operations, - // Actions - deleteQueue = require('./../../actions/queue/DeleteQueue'), - setQueueMetadata = require('./../../actions/queue/SetQueueMetadata'), - getQueueMetadata = require('./../../actions/queue/GetQueueMetadata'), - putMessage = require('./../../actions/queue/PutMessage'), - getMessages = require('./../../actions/queue/GetMessages'), - clearMessages = require('./../../actions/queue/ClearMessages'), - peekMessages = require('./../../actions/queue/PeekMessages'), - deleteMessage = 
require('./../../actions/queue/DeleteMessage'), - updateMessage = require('./../../actions/queue/UpdateMessage'), - listQueues = require('./../../actions/queue/ListQueues'), - setQueueAcl = require('./../../actions/queue/SetQueueAcl'), - getQueueAcl = require('./../../actions/queue/GetQueueAcl'), - createQueue = require('./../../actions/queue/CreateQueue'); +/** @format */ + +"use strict"; + +const BbPromise = require("bluebird"), + Operations = require("./../../core/Constants").Operations, + // Actions + deleteQueue = require("./../../actions/queue/DeleteQueue"), + setQueueMetadata = require("./../../actions/queue/SetQueueMetadata"), + getQueueMetadata = require("./../../actions/queue/GetQueueMetadata"), + putMessage = require("./../../actions/queue/PutMessage"), + getMessages = require("./../../actions/queue/GetMessages"), + clearMessages = require("./../../actions/queue/ClearMessages"), + peekMessages = require("./../../actions/queue/PeekMessages"), + deleteMessage = require("./../../actions/queue/DeleteMessage"), + updateMessage = require("./../../actions/queue/UpdateMessage"), + listQueues = require("./../../actions/queue/ListQueues"), + setQueueAcl = require("./../../actions/queue/SetQueueAcl"), + getQueueAcl = require("./../../actions/queue/GetQueueAcl"), + createQueue = require("./../../actions/queue/CreateQueue"); module.exports = (req, res) => { - BbPromise.try(() => { - actions[req.azuriteOperation](req.azuriteRequest, res); - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; - }); -} + BbPromise.try(() => { + actions[req.azuriteOperation](req.azuriteRequest, res); + }).catch((e) => { + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; const actions = {}; actions[undefined] = (request, res) => { - res.status(501).send('Not Implemented yet.'); -} + res.status(501).send("Not Implemented yet."); +}; actions[Operations.Queue.LIST_QUEUES] = (request, res) => { - 
listQueues.process(request, res); -} + listQueues.process(request, res); +}; actions[Operations.Queue.CREATE_QUEUE] = (request, res) => { - createQueue.process(request, res); -} + createQueue.process(request, res); +}; actions[Operations.Queue.DELETE_QUEUE] = (request, res) => { - deleteQueue.process(request, res); -} + deleteQueue.process(request, res); +}; actions[Operations.Queue.SET_QUEUE_METADATA] = (request, res) => { - setQueueMetadata.process(request, res); -} + setQueueMetadata.process(request, res); +}; actions[Operations.Queue.GET_QUEUE_METADATA] = (request, res) => { - getQueueMetadata.process(request, res); -} + getQueueMetadata.process(request, res); +}; actions[Operations.Queue.PUT_MESSAGE] = (request, res) => { - putMessage.process(request, res); -} + putMessage.process(request, res); +}; actions[Operations.Queue.GET_MESSAGE] = (request, res) => { - getMessages.process(request, res); -} + getMessages.process(request, res); +}; actions[Operations.Queue.CLEAR_MESSAGES] = (request, res) => { - clearMessages.process(request, res); -} + clearMessages.process(request, res); +}; actions[Operations.Queue.PEEK_MESSAGES] = (request, res) => { - peekMessages.process(request, res); -} + peekMessages.process(request, res); +}; actions[Operations.Queue.DELETE_MESSAGE] = (request, res) => { - deleteMessage.process(request, res); -} + deleteMessage.process(request, res); +}; actions[Operations.Queue.UPDATE_MESSAGE] = (request, res) => { - updateMessage.process(request, res); -} + updateMessage.process(request, res); +}; actions[Operations.Queue.SET_QUEUE_ACL] = (request, res) => { - setQueueAcl.process(request, res); -} + setQueueAcl.process(request, res); +}; actions[Operations.Queue.GET_QUEUE_ACL] = (request, res) => { - getQueueAcl.process(request, res); -} \ No newline at end of file + getQueueAcl.process(request, res); +}; diff --git a/lib/middleware/queue/validation.js b/lib/middleware/queue/validation.js index 212fd714e..a0d1fb59f 100644 --- 
a/lib/middleware/queue/validation.js +++ b/lib/middleware/queue/validation.js @@ -1,122 +1,115 @@ -'use strict'; - -const BbPromise = require('bluebird'), - Operations = require('./../../core/Constants').Operations, - AzuriteQueueRequest = require('./../../model/queue/AzuriteQueueRequest'), - QueueManager = require('./../../core/queue/QueueManager'), - // Validation modules - ValidationContext = require('./../../validation/queue/ValidationContext'), - QueueCreationValidation = require('./../../validation/queue/QueueCreation'), - QueueExistsValidation = require('./../../validation/queue/QueueExists'), - QueueMessageSizeValidation = require('./../../validation/queue/QueueMessageSize'), - NumOfMessagesValidation = require('./../../validation/queue/NumOfMessages'), - QueueNameValidation = require('./../../validation/queue/QueueName'), - MessageExistsValidation = require('./../../validation/queue/MessageExists'), - PopReceiptValidation = require('./../../validation/queue/PopReceipt'), - VisibilityTimeoutValueValidation = require('./../../validation/queue/VisibilityTimeoutValue'), - MessageExpired = require('./../../validation/queue/MessageExpired'), - NumOfSignedIdentifiersVal = require('./../../validation/NumOfSignedIdentifiers'); - +/** @format */ + +"use strict"; + +const BbPromise = require("bluebird"), + Operations = require("./../../core/Constants").Operations, + AzuriteQueueRequest = require("./../../model/queue/AzuriteQueueRequest"), + QueueManager = require("./../../core/queue/QueueManager"), + // Validation modules + ValidationContext = require("./../../validation/queue/ValidationContext"), + QueueCreationValidation = require("./../../validation/queue/QueueCreation"), + QueueExistsValidation = require("./../../validation/queue/QueueExists"), + QueueMessageSizeValidation = require("./../../validation/queue/QueueMessageSize"), + NumOfMessagesValidation = require("./../../validation/queue/NumOfMessages"), + QueueNameValidation = 
require("./../../validation/queue/QueueName"), + MessageExistsValidation = require("./../../validation/queue/MessageExists"), + PopReceiptValidation = require("./../../validation/queue/PopReceipt"), + VisibilityTimeoutValueValidation = require("./../../validation/queue/VisibilityTimeoutValue"), + MessageExpired = require("./../../validation/queue/MessageExpired"), + NumOfSignedIdentifiersVal = require("./../../validation/NumOfSignedIdentifiers"); module.exports = (req, res, next) => { - BbPromise.try(() => { - const request = req.azuriteRequest || {}; - const { queue, message } = QueueManager.getQueueAndMessage({ queueName: request.queueName, messageId: request.messageId }); - const validationContext = new ValidationContext({ - request: request, - queue: queue, - message: message, - operation: req.azuriteOperation - }) - validations[req.azuriteOperation](validationContext); - next(); - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; + BbPromise.try(() => { + const request = req.azuriteRequest || {}; + const { queue, message } = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + messageId: request.messageId, + }); + const validationContext = new ValidationContext({ + request: request, + queue: queue, + message: message, + operation: req.azuriteOperation, }); -} + validations[req.azuriteOperation](validationContext); + next(); + }).catch((e) => { + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; const validations = {}; validations[undefined] = () => { - // NO VALIDATIONS (this is an unimplemented call) -} + // NO VALIDATIONS (this is an unimplemented call) +}; validations[Operations.Queue.LIST_QUEUES] = (valContext) => { - // NO VALIDATIONS -} + // NO VALIDATIONS +}; validations[Operations.Queue.CREATE_QUEUE] = (valContext) => { - valContext - .run(QueueNameValidation) - .run(QueueCreationValidation); -} + 
valContext.run(QueueNameValidation).run(QueueCreationValidation); +}; validations[Operations.Queue.DELETE_QUEUE] = (valContext) => { - valContext - .run(QueueExistsValidation); -} + valContext.run(QueueExistsValidation); +}; validations[Operations.Queue.SET_QUEUE_METADATA] = (valContext) => { - valContext - .run(QueueExistsValidation); -} + valContext.run(QueueExistsValidation); +}; validations[Operations.Queue.GET_QUEUE_METADATA] = (valContext) => { - valContext - .run(QueueExistsValidation); -} + valContext.run(QueueExistsValidation); +}; validations[Operations.Queue.PUT_MESSAGE] = (valContext) => { - valContext - .run(QueueExistsValidation) - .run(VisibilityTimeoutValueValidation) - .run(QueueMessageSizeValidation); -} + valContext + .run(QueueExistsValidation) + .run(VisibilityTimeoutValueValidation) + .run(QueueMessageSizeValidation); +}; validations[Operations.Queue.GET_MESSAGE] = (valContext) => { - valContext - .run(QueueExistsValidation) - .run(VisibilityTimeoutValueValidation) - .run(NumOfMessagesValidation); -} + valContext + .run(QueueExistsValidation) + .run(VisibilityTimeoutValueValidation) + .run(NumOfMessagesValidation); +}; validations[Operations.Queue.CLEAR_MESSAGES] = (valContext) => { - valContext - .run(QueueExistsValidation); -} + valContext.run(QueueExistsValidation); +}; validations[Operations.Queue.PEEK_MESSAGES] = (valContext) => { - valContext - .run(QueueExistsValidation) - .run(NumOfMessagesValidation); -} + valContext.run(QueueExistsValidation).run(NumOfMessagesValidation); +}; validations[Operations.Queue.DELETE_MESSAGE] = (valContext) => { - valContext - .run(QueueExistsValidation) - .run(MessageExistsValidation) - .run(MessageExpired) - .run(PopReceiptValidation); -} + valContext + .run(QueueExistsValidation) + .run(MessageExistsValidation) + .run(MessageExpired) + .run(PopReceiptValidation); +}; validations[Operations.Queue.UPDATE_MESSAGE] = (valContext) => { - valContext - .run(QueueExistsValidation) - 
.run(QueueMessageSizeValidation) - .run(MessageExistsValidation) - .run(MessageExpired) - .run(VisibilityTimeoutValueValidation) - .run(PopReceiptValidation); -} + valContext + .run(QueueExistsValidation) + .run(QueueMessageSizeValidation) + .run(MessageExistsValidation) + .run(MessageExpired) + .run(VisibilityTimeoutValueValidation) + .run(PopReceiptValidation); +}; validations[Operations.Queue.SET_QUEUE_ACL] = (valContext) => { - valContext - .run(QueueExistsValidation) - .run(NumOfSignedIdentifiersVal); -} + valContext.run(QueueExistsValidation).run(NumOfSignedIdentifiersVal); +}; validations[Operations.Queue.GET_QUEUE_ACL] = (valContext) => { - valContext - .run(QueueExistsValidation); - } \ No newline at end of file + valContext.run(QueueExistsValidation); +}; diff --git a/lib/middleware/table/actions.js b/lib/middleware/table/actions.js index 1ffa39cc8..0672c1b65 100644 --- a/lib/middleware/table/actions.js +++ b/lib/middleware/table/actions.js @@ -1,69 +1,71 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - Operations = require('./../../core/Constants').Operations.Table, - insertEntity = require('./../../actions/table/InsertEntity'), - deleteTable = require('./../../actions/table/DeleteTable'), - deleteEntity = require('./../../actions/table/DeleteEntity'), - queryTable = require('./../../actions/table/QueryTable'), - queryEntities = require('./../../actions/table/QueryEntities'), - updateEntity = require('./../../actions/table/UpdateEntity'), - insertOrReplaceEntity = require('./../../actions/table/InsertOrReplaceEntity'), - mergeEntity = require('./../../actions/table/MergeEntity'), - insertOrMergeEntity = require('./../../actions/table/InsertOrMergeEntity'), - createTable = require('./../../actions/table/CreateTable'); +"use strict"; + +const BbPromise = require("bluebird"), + Operations = require("./../../core/Constants").Operations.Table, + insertEntity = require("./../../actions/table/InsertEntity"), + deleteTable = 
require("./../../actions/table/DeleteTable"), + deleteEntity = require("./../../actions/table/DeleteEntity"), + queryTable = require("./../../actions/table/QueryTable"), + queryEntities = require("./../../actions/table/QueryEntities"), + updateEntity = require("./../../actions/table/UpdateEntity"), + insertOrReplaceEntity = require("./../../actions/table/InsertOrReplaceEntity"), + mergeEntity = require("./../../actions/table/MergeEntity"), + insertOrMergeEntity = require("./../../actions/table/InsertOrMergeEntity"), + createTable = require("./../../actions/table/CreateTable"); module.exports = (req, res) => { - BbPromise.try(() => { - actions[req.azuriteOperation](req.azuriteRequest, res); - }).catch((e) => { - res.status(e.statusCode || 500).send(e.message); - if (!e.statusCode) throw e; - }); -} + BbPromise.try(() => { + actions[req.azuriteOperation](req.azuriteRequest, res); + }).catch((e) => { + res.status(e.statusCode || 500).send(e.message); + if (!e.statusCode) throw e; + }); +}; const actions = {}; actions[undefined] = (request, res) => { - res.status(501).send('Not Implemented yet.'); -} + res.status(501).send("Not Implemented yet."); +}; actions[Operations.CREATE_TABLE] = (request, res) => { - createTable.process(request, res); -} + createTable.process(request, res); +}; actions[Operations.INSERT_ENTITY] = (request, res) => { - insertEntity.process(request, res); -} + insertEntity.process(request, res); +}; actions[Operations.DELETE_TABLE] = (request, res) => { - deleteTable.process(request, res); -} + deleteTable.process(request, res); +}; actions[Operations.DELETE_ENTITY] = (request, res) => { - deleteEntity.process(request, res); -} + deleteEntity.process(request, res); +}; actions[Operations.QUERY_TABLE] = (request, res) => { - queryTable.process(request, res); -} + queryTable.process(request, res); +}; actions[Operations.QUERY_ENTITY] = (request, res) => { - queryEntities.process(request, res); -} + queryEntities.process(request, res); +}; 
actions[Operations.UPDATE_ENTITY] = (request, res) => { - updateEntity.process(request, res); -} + updateEntity.process(request, res); +}; actions[Operations.INSERT_OR_REPLACE_ENTITY] = (request, res) => { - insertOrReplaceEntity.process(request, res); -} + insertOrReplaceEntity.process(request, res); +}; actions[Operations.MERGE_ENTITY] = (request, res) => { - mergeEntity.process(request, res); -} + mergeEntity.process(request, res); +}; actions[Operations.INSERT_OR_MERGE_ENTITY] = (request, res) => { - insertOrMergeEntity.process(request, res); -} \ No newline at end of file + insertOrMergeEntity.process(request, res); +}; diff --git a/lib/middleware/table/validation.js b/lib/middleware/table/validation.js index e5536c0ca..f6d2766e8 100644 --- a/lib/middleware/table/validation.js +++ b/lib/middleware/table/validation.js @@ -1,102 +1,99 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - N = require('./../../core/HttpHeaderNames'), - AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - Operations = require('./../../core/Constants').Operations.Table, - tsm = require('./../../core/table/TableStorageManager'), - ValidationContext = require('./../../validation/table/ValidationContext'), - TableExistsVal = require('./../../validation/table/TableExists'), - ConflictingEntityVal = require('./../../validation/table/ConflictingEntity'), - TableNameVal = require('./../../validation/table/TableName'), - EntityExistsVal = require('./../../validation/table/EntityExists'), - EntityIfMatchVal = require('./../../validation/table/EntityIfMatch'), - ConflictingTableVal = require('./../../validation/table/ConflictingTable'); +"use strict"; + +const BbPromise = require("bluebird"), + N = require("./../../core/HttpHeaderNames"), + AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + Operations = require("./../../core/Constants").Operations.Table, + tsm = 
require("./../../core/table/TableStorageManager"), + ValidationContext = require("./../../validation/table/ValidationContext"), + TableExistsVal = require("./../../validation/table/TableExists"), + ConflictingEntityVal = require("./../../validation/table/ConflictingEntity"), + TableNameVal = require("./../../validation/table/TableName"), + EntityExistsVal = require("./../../validation/table/EntityExists"), + EntityIfMatchVal = require("./../../validation/table/EntityIfMatch"), + ConflictingTableVal = require("./../../validation/table/ConflictingTable"); module.exports = (req, res, next) => { - BbPromise.try(() => { - // Azurite currently does not support XML-Atom responses, only supports JSON-based responses. - if (req.headers[N.CONTENT_TYPE] === `application/atom+xml`) { - throw new AError(ErrorCodes.AtomXmlNotSupported); - } - const request = req.azuriteRequest, - tableProxy = tsm._getTable(request.tableName), - entityProxy = tsm._getEntity(request.tableName, request.partitionKey, request.rowKey), - validationContext = new ValidationContext({ - request: request, - table: tableProxy, - entity: entityProxy - }); - validations[req.azuriteOperation](validationContext); - next(); - }).catch((e) => { - // in order to avoid PANIC and better support Azure Storage Explorer - // sending not implemented instead of server error - res.status(e.statusCode || 501).send(e.message); - }); -} + BbPromise.try(() => { + // Azurite currently does not support XML-Atom responses, only supports JSON-based responses. 
+ if (req.headers[N.CONTENT_TYPE] === `application/atom+xml`) { + throw new AError(ErrorCodes.AtomXmlNotSupported); + } + const request = req.azuriteRequest, + tableProxy = tsm._getTable(request.tableName), + entityProxy = tsm._getEntity( + request.tableName, + request.partitionKey, + request.rowKey + ), + validationContext = new ValidationContext({ + request: request, + table: tableProxy, + entity: entityProxy, + }); + validations[req.azuriteOperation](validationContext); + next(); + }).catch((e) => { + // in order to avoid PANIC and better support Azure Storage Explorer + // sending not implemented instead of server error + res.status(e.statusCode || 501).send(e.message); + }); +}; const validations = {}; validations[undefined] = () => { - // NO VALIDATIONS (this is an unimplemented call) -} + // NO VALIDATIONS (this is an unimplemented call) +}; validations[Operations.CREATE_TABLE] = (valContext) => { - valContext - .run(ConflictingTableVal) - .run(TableNameVal); -} + valContext.run(ConflictingTableVal).run(TableNameVal); +}; validations[Operations.INSERT_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal) - .run(ConflictingEntityVal); -} + valContext.run(TableExistsVal).run(ConflictingEntityVal); +}; validations[Operations.DELETE_TABLE] = (valContext) => { - valContext - .run(TableExistsVal); -} + valContext.run(TableExistsVal); +}; validations[Operations.DELETE_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal) - .run(EntityExistsVal) - .run(EntityIfMatchVal); -} + valContext + .run(TableExistsVal) + .run(EntityExistsVal) + .run(EntityIfMatchVal); +}; validations[Operations.QUERY_TABLE] = (valContext) => { - valContext - .run(TableExistsVal) -} + valContext.run(TableExistsVal); +}; validations[Operations.QUERY_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal) -} + valContext.run(TableExistsVal); +}; validations[Operations.UPDATE_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal) - .run(EntityExistsVal) - 
.run(EntityIfMatchVal); -} + valContext + .run(TableExistsVal) + .run(EntityExistsVal) + .run(EntityIfMatchVal); +}; validations[Operations.INSERT_OR_REPLACE_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal); -} + valContext.run(TableExistsVal); +}; validations[Operations.MERGE_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal) - .run(EntityExistsVal) - .run(EntityIfMatchVal); -} + valContext + .run(TableExistsVal) + .run(EntityExistsVal) + .run(EntityIfMatchVal); +}; validations[Operations.INSERT_OR_MERGE_ENTITY] = (valContext) => { - valContext - .run(TableExistsVal); -} \ No newline at end of file + valContext.run(TableExistsVal); +}; diff --git a/lib/model/blob/AppendBlobProxy.js b/lib/model/blob/AppendBlobProxy.js index a773b47c4..2ed499f3a 100644 --- a/lib/model/blob/AppendBlobProxy.js +++ b/lib/model/blob/AppendBlobProxy.js @@ -1,23 +1,25 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - BlobProxy = require('./BlobProxy'), - HeaderNames = require('./../../core/HttpHeaderNames'), - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; + +const crypto = require("crypto"), + BlobProxy = require("./BlobProxy"), + HeaderNames = require("./../../core/HttpHeaderNames"), + InternalAzuriteError = require("./../../core/InternalAzuriteError"); /** - * Serves as a Append blob proxy to the corresponding LokiJS object. - * + * Serves as a Append blob proxy to the corresponding LokiJS object. 
+ * * @class AppendBlobProxy */ class AppendBlobProxy extends BlobProxy { - constructor(original, containerName) { - super(original, container); - } + constructor(original, containerName) { + super(original, container); + } - incrementCommittedBlockCount() { - this.original[HeaderNames.BLOB_COMMITTED_BLOCK_COUNT] += 1; - } + incrementCommittedBlockCount() { + this.original[HeaderNames.BLOB_COMMITTED_BLOCK_COUNT] += 1; + } } -module.exports = AppendBlobProxy; \ No newline at end of file +module.exports = AppendBlobProxy; diff --git a/lib/model/blob/AzuriteBlobRequest.js b/lib/model/blob/AzuriteBlobRequest.js index 7e25bb2fd..43cfa87bd 100644 --- a/lib/model/blob/AzuriteBlobRequest.js +++ b/lib/model/blob/AzuriteBlobRequest.js @@ -1,96 +1,113 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - url = require('url'), - EntityType = require('./../../core/Constants').StorageEntityType, - BlockListType = require('./../../core/Constants').BlockListType, - AzuriteRequest = require('./AzuriteRequest'), - N = require('./../../core/HttpHeaderNames'), - env = require('./../../core/env'), - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; -class AzuriteBlobRequest extends AzuriteRequest { - constructor({ - req = undefined, - entityType = undefined, - payload = undefined }) { +const crypto = require("crypto"), + url = require("url"), + EntityType = require("./../../core/Constants").StorageEntityType, + BlockListType = require("./../../core/Constants").BlockListType, + AzuriteRequest = require("./AzuriteRequest"), + N = require("./../../core/HttpHeaderNames"), + env = require("./../../core/env"), + InternalAzuriteError = require("./../../core/InternalAzuriteError"); - super({ - req: req, - entityType: entityType || req.headers['x-ms-blob-type'], - payload: payload - }); - this.containerName = req.params.container; - this.blobName = req.params[0]; - this.blockId = req.query.blockid; - this.snapshot = false; - this.copyId = 
req.query.copyid; - // Per default, all (block) blobs will be set to committed by EntityGenerator - this.commit = true; - this.blockListType = this.query.blocklisttype || BlockListType.COMMITTED - if (this.query.snapshot) { - this.snapshotDate = new Date(this.query.snapshot).toUTCString(); - this.snapshot = true; - this.id = env.snapshotId(this.containerName, this.blobName, this.snapshotDate); - this.originId = env.blobId(this.containerName, this.blobName); - this.originUri = env.diskStorageUri(this.originId); - } else if (this.blockId) { - this.id = env.blockId(this.containerName, this.blobName, this.blockId); - this.parentId = env.blobId(this.containerName, this.blobName); - this.parentUri = env.diskStorageUri(this.parentId); - } else { - this.id = env.blobId(this.containerName, this.blobName); - } - this.uri = env.diskStorageUri(this.id); +class AzuriteBlobRequest extends AzuriteRequest { + constructor({ + req = undefined, + entityType = undefined, + payload = undefined, + }) { + super({ + req: req, + entityType: entityType || req.headers["x-ms-blob-type"], + payload: payload, + }); + this.containerName = req.params.container; + this.blobName = req.params[0]; + this.blockId = req.query.blockid; + this.snapshot = false; + this.copyId = req.query.copyid; + // Per default, all (block) blobs will be set to committed by EntityGenerator + this.commit = true; + this.blockListType = this.query.blocklisttype || BlockListType.COMMITTED; + if (this.query.snapshot) { + this.snapshotDate = new Date(this.query.snapshot).toUTCString(); + this.snapshot = true; + this.id = env.snapshotId( + this.containerName, + this.blobName, + this.snapshotDate + ); + this.originId = env.blobId(this.containerName, this.blobName); + this.originUri = env.diskStorageUri(this.originId); + } else if (this.blockId) { + this.id = env.blockId(this.containerName, this.blobName, this.blockId); + this.parentId = env.blobId(this.containerName, this.blobName); + this.parentUri = 
env.diskStorageUri(this.parentId); + } else { + this.id = env.blobId(this.containerName, this.blobName); } + this.uri = env.diskStorageUri(this.id); + } - static clone(request) { - const copy = new AzuriteBlobRequest({ req: { rawHeaders: [], headers: {}, params: {}, query: {} }, entityType: request.entityType, payload: request.payload }); - Object.assign(copy, request); - return copy; - } + static clone(request) { + const copy = new AzuriteBlobRequest({ + req: { rawHeaders: [], headers: {}, params: {}, query: {} }, + entityType: request.entityType, + payload: request.payload, + }); + Object.assign(copy, request); + return copy; + } - calculateContentMd5() { - if (!this.body) { - return undefined; - } - return crypto.createHash('md5') - .update(this.body) - .digest('base64'); + calculateContentMd5() { + if (!this.body) { + return undefined; } + return crypto + .createHash("md5") + .update(this.body) + .digest("base64"); + } - isSnapshot() { - return this.snapshot; - } + isSnapshot() { + return this.snapshot; + } - copySourceName() { - if (this.httpProps[N.COPY_SOURCE === undefined]) { - throw new InternalAzuriteError('Request: copySourceUrl was called without copy-source header set.') - } - const match = /devstoreaccount1\/(.*)/.exec(this.httpProps[N.COPY_SOURCE]); - if (match === null) { - throw new InternalAzuriteError(`Request: x-ms-copy-source was not in the expected format (was "${this.httpProps[N.COPY_SOURCE]}".`); - } - const source = match[1]; - const pathname = url.parse(source).pathname; - const parts = pathname.split('/'), - containerName = parts[0]; - parts.splice(0, 1); - const blobName = decodeURIComponent(parts.join('/')); // unicode characters in http headers are encoded! 
- const query = url.parse(source).query; - let date = undefined; - const regex = /snapshot=([^&]*)/; - const ssMatch = regex.exec(query); - if (ssMatch !== null) { - const dateStr = ssMatch[1]; - date = new Date(decodeURIComponent(dateStr)).toUTCString(); - } - return { - sourceContainerName: containerName, - sourceBlobName: blobName, - date: date - }; + copySourceName() { + if (this.httpProps[N.COPY_SOURCE === undefined]) { + throw new InternalAzuriteError( + "Request: copySourceUrl was called without copy-source header set." + ); + } + const match = /devstoreaccount1\/(.*)/.exec(this.httpProps[N.COPY_SOURCE]); + if (match === null) { + throw new InternalAzuriteError( + `Request: x-ms-copy-source was not in the expected format (was "${ + this.httpProps[N.COPY_SOURCE] + }".` + ); + } + const source = match[1]; + const pathname = url.parse(source).pathname; + const parts = pathname.split("/"), + containerName = parts[0]; + parts.splice(0, 1); + const blobName = decodeURIComponent(parts.join("/")); // unicode characters in http headers are encoded! 
+ const query = url.parse(source).query; + let date = undefined; + const regex = /snapshot=([^&]*)/; + const ssMatch = regex.exec(query); + if (ssMatch !== null) { + const dateStr = ssMatch[1]; + date = new Date(decodeURIComponent(dateStr)).toUTCString(); } + return { + sourceContainerName: containerName, + sourceBlobName: blobName, + date: date, + }; + } } -module.exports = AzuriteBlobRequest; \ No newline at end of file +module.exports = AzuriteBlobRequest; diff --git a/lib/model/blob/AzuriteContainerRequest.js b/lib/model/blob/AzuriteContainerRequest.js index 4b6f35540..12a6134b9 100644 --- a/lib/model/blob/AzuriteContainerRequest.js +++ b/lib/model/blob/AzuriteContainerRequest.js @@ -1,34 +1,33 @@ -'use strict'; +/** @format */ -const path = require('path'), - env = require('./../../core/env'), - EntityType = require('./../../core/Constants').StorageEntityType, - AzuriteRequest = require('./AzuriteRequest'); +"use strict"; -class AzuriteContainerRequest extends AzuriteRequest { - constructor({ - req = null, - payload = undefined }) { +const path = require("path"), + env = require("./../../core/env"), + EntityType = require("./../../core/Constants").StorageEntityType, + AzuriteRequest = require("./AzuriteRequest"); - super({ - req: req, - entityType: EntityType.Container, - payload: payload - }); +class AzuriteContainerRequest extends AzuriteRequest { + constructor({ req = null, payload = undefined }) { + super({ + req: req, + entityType: EntityType.Container, + payload: payload, + }); - this.containerName = req.params.container; - } + this.containerName = req.params.container; + } - /** - * Returns the full path on disk where the container (directory) will be created - * (e.g. 
/home/user1/azurite-workspace/__blobstorage__/my-container) - * - * @returns full path to container - * @memberof AzuriteContainerRequest - */ - fullPath() { - return path.join(env.localStoragePath, this.containerName); - } + /** + * Returns the full path on disk where the container (directory) will be created + * (e.g. /home/user1/azurite-workspace/__blobstorage__/my-container) + * + * @returns full path to container + * @memberof AzuriteContainerRequest + */ + fullPath() { + return path.join(env.localStoragePath, this.containerName); + } } -module.exports = AzuriteContainerRequest; \ No newline at end of file +module.exports = AzuriteContainerRequest; diff --git a/lib/model/blob/AzuriteRequest.js b/lib/model/blob/AzuriteRequest.js index f9d971cc5..a05342fae 100644 --- a/lib/model/blob/AzuriteRequest.js +++ b/lib/model/blob/AzuriteRequest.js @@ -1,131 +1,169 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - N = require('./../../core/HttpHeaderNames'), - EntityType = require('./../../core/Constants').StorageEntityType, - etag = require('./../../core/utils'), - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; -class AzuriteRequest { - constructor({ - req = undefined, - entityType = undefined, - payload = undefined }) { - - if (req === undefined) { - throw new InternalAzuriteError('AzuriteRequest: req cannot be undefined!'); - } +const crypto = require("crypto"), + N = require("./../../core/HttpHeaderNames"), + EntityType = require("./../../core/Constants").StorageEntityType, + etag = require("./../../core/utils"), + InternalAzuriteError = require("./../../core/InternalAzuriteError"); - this.httpProps = {}; - this.metaProps = {}; - this.body = req.body; - this.entityType = entityType; - this.query = req.query; - this.now = Date.now(); - this.payload = payload; - this._initMetaProps(req.rawHeaders); - this._initHttpProps(req.headers); +class AzuriteRequest { + constructor({ + req = undefined, + entityType = 
undefined, + payload = undefined, + }) { + if (req === undefined) { + throw new InternalAzuriteError( + "AzuriteRequest: req cannot be undefined!" + ); } - static clone(request) { - const copy = new AzuriteRequest(); - Object.assign(copy, request); - return copy; - } + this.httpProps = {}; + this.metaProps = {}; + this.body = req.body; + this.entityType = entityType; + this.query = req.query; + this.now = Date.now(); + this.payload = payload; + this._initMetaProps(req.rawHeaders); + this._initHttpProps(req.headers); + } - /** - * A container request cannot refer to a blob name (which is what publicName is about). - * - * @returns - * @memberof AzuriteRequest - */ - publicName() { - return undefined; - } + static clone(request) { + const copy = new AzuriteRequest(); + Object.assign(copy, request); + return copy; + } - /** - * Only Blobs can be snapshotted. - * - * @returns - * @memberof AzuriteRequest - */ - isSnapshot() { - return false; - } + /** + * A container request cannot refer to a blob name (which is what publicName is about). + * + * @returns + * @memberof AzuriteRequest + */ + publicName() { + return undefined; + } - leaseId() { - return this.httpProps[N.LEASE_ID]; - } + /** + * Only Blobs can be snapshotted. + * + * @returns + * @memberof AzuriteRequest + */ + isSnapshot() { + return false; + } - // Working on rawHeaders for meta attributes to preserve casing. 
- _initMetaProps(rawHeaders) { - this.metaProps = rawHeaders.map((e, i, a) => { - if (e.indexOf('x-ms-meta-') !== -1) { - e = e.replace('x-ms-meta-', ''); - const o = {}; - o[e] = a[i + 1]; - return o; - } - }).filter((e) => { - return e !== undefined; - }).reduce((acc, e) => { - const key = Object.keys(e)[0]; - acc[key] = e[key]; - return acc; - }, {}); - } + leaseId() { + return this.httpProps[N.LEASE_ID]; + } - _initHttpProps(httpHeaders) { - this.httpProps[N.CONTENT_LENGTH] = httpHeaders['Content-Length'] || httpHeaders['content-length']; - this.httpProps[N.ORIGIN] = httpHeaders['origin']; - this.httpProps[N.ACCESS_CONTROL_REQUEST_METHOD] = httpHeaders['access-control-request-method']; - this.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS] = httpHeaders['access-control-request-headers']; - // x-ms-* attributes have precedence over according HTTP-Headers - this.httpProps[N.CONTENT_TYPE] = httpHeaders['x-ms-blob-content-type'] || httpHeaders['content-type'] || 'application/octet-stream'; - this.httpProps[N.CONTENT_ENCODING] = httpHeaders['x-ms-blob-content-encoding'] || httpHeaders['content-encoding']; - this.httpProps[N.CONTENT_DISPOSITION] = httpHeaders['x-ms-blob-content-disposition'] || httpHeaders['content-disposition']; - this.httpProps[N.CACHE_CONTROL] = httpHeaders['x-ms-blob-cache-control'] || httpHeaders['cache-control']; - this.httpProps[N.CONTENT_LANGUAGE] = httpHeaders['x-ms-blob-content-language'] || httpHeaders['content-language']; - this.httpProps[N.CONTENT_MD5] = httpHeaders['x-ms-blob-content-md5'] || httpHeaders['content-md5']; - this.httpProps[N.RANGE] = httpHeaders['x-ms-range'] || httpHeaders['range']; + // Working on rawHeaders for meta attributes to preserve casing. 
+ _initMetaProps(rawHeaders) { + this.metaProps = rawHeaders + .map((e, i, a) => { + if (e.indexOf("x-ms-meta-") !== -1) { + e = e.replace("x-ms-meta-", ""); + const o = {}; + o[e] = a[i + 1]; + return o; + } + }) + .filter((e) => { + return e !== undefined; + }) + .reduce((acc, e) => { + const key = Object.keys(e)[0]; + acc[key] = e[key]; + return acc; + }, {}); + } - this.httpProps[N.BLOB_TYPE] = httpHeaders['x-ms-blob-type'] - this.httpProps[N.RANGE_GET_CONTENT_MD5] = httpHeaders['x-ms-range-get-content-md5']; - this.httpProps[N.DELETE_SNAPSHOTS] = httpHeaders['x-ms-delete-snapshots']; - this.httpProps[N.LEASE_ID] = httpHeaders['x-ms-lease-id']; - this.httpProps[N.LEASE_ACTION] = httpHeaders['x-ms-lease-action'] ? httpHeaders['x-ms-lease-action'].toLowerCase() : undefined; - this.httpProps[N.LEASE_DURATION] = httpHeaders['x-ms-lease-duration']; - this.httpProps[N.LEASE_BREAK_PERIOD] = httpHeaders['x-ms-lease-break-period']; - this.httpProps[N.PROPOSED_LEASE_ID] = httpHeaders['x-ms-proposed-lease-id']; - this.httpProps[N.IF_MODFIFIED_SINCE] = httpHeaders['if-modified-since']; - this.httpProps[N.IF_UNMODIFIED_SINCE] = httpHeaders['if-unmodified-since']; - this.httpProps[N.IF_MATCH] = httpHeaders['if-match']; - this.httpProps[N.IF_NONE_MATCH] = httpHeaders['if-none-match']; - this.httpProps[N.SOURCE_IF_MODFIFIED_SINCE] = httpHeaders['x-ms-source-if-modified-since']; - this.httpProps[N.SOURCE_IF_UNMODIFIED_SINCE] = httpHeaders['x-ms-source-if-unmodified-since']; - this.httpProps[N.SOURCE_IF_MATCH] = httpHeaders['x-ms-source-if-match']; - this.httpProps[N.SOURCE_IF_NONE_MATCH] = httpHeaders['x-ms-source-if-none-match']; - this.httpProps[N.COPY_SOURCE] = httpHeaders['x-ms-copy-source']; + _initHttpProps(httpHeaders) { + this.httpProps[N.CONTENT_LENGTH] = + httpHeaders["Content-Length"] || httpHeaders["content-length"]; + this.httpProps[N.ORIGIN] = httpHeaders["origin"]; + this.httpProps[N.ACCESS_CONTROL_REQUEST_METHOD] = + httpHeaders["access-control-request-method"]; 
+ this.httpProps[N.ACCESS_CONTROL_REQUEST_HEADERS] = + httpHeaders["access-control-request-headers"]; + // x-ms-* attributes have precedence over according HTTP-Headers + this.httpProps[N.CONTENT_TYPE] = + httpHeaders["x-ms-blob-content-type"] || + httpHeaders["content-type"] || + "application/octet-stream"; + this.httpProps[N.CONTENT_ENCODING] = + httpHeaders["x-ms-blob-content-encoding"] || + httpHeaders["content-encoding"]; + this.httpProps[N.CONTENT_DISPOSITION] = + httpHeaders["x-ms-blob-content-disposition"] || + httpHeaders["content-disposition"]; + this.httpProps[N.CACHE_CONTROL] = + httpHeaders["x-ms-blob-cache-control"] || httpHeaders["cache-control"]; + this.httpProps[N.CONTENT_LANGUAGE] = + httpHeaders["x-ms-blob-content-language"] || + httpHeaders["content-language"]; + this.httpProps[N.CONTENT_MD5] = + httpHeaders["x-ms-blob-content-md5"] || httpHeaders["content-md5"]; + this.httpProps[N.RANGE] = httpHeaders["x-ms-range"] || httpHeaders["range"]; - // As per spec @ https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl - // if this header is not specified it is set to 'private' per default. 
- this.httpProps[N.BLOB_PUBLIC_ACCESS] = httpHeaders['x-ms-blob-public-access'] || 'private'; - // Append Blobs specific - this.httpProps[N.BLOB_CONDITION_MAX_SIZE] = parseInt(httpHeaders['x-ms-blob-condition-maxsize']) || undefined; - this.httpProps[N.BLOB_CONDITION_APPENDPOS] = parseInt(httpHeaders['x-ms-blob-condition-appendpos']) || undefined; - // Page Blobs specific - this.httpProps[N.BLOB_CONTENT_LENGTH] = httpHeaders['x-ms-blob-content-length']; - this.httpProps[N.PAGE_WRITE] = httpHeaders['x-ms-page-write']; - this.httpProps[N.IF_SEQUENCE_NUMBER_LE] = httpHeaders['x-ms-if-sequence-number-le']; - this.httpProps[N.IF_SEQUENCE_NUMBER_LT] = httpHeaders['x-ms-if-sequence-number-lt']; - this.httpProps[N.IF_SEQUENCE_NUMBER_EQ] = httpHeaders['x-ms-if-sequence-number-eq']; + this.httpProps[N.BLOB_TYPE] = httpHeaders["x-ms-blob-type"]; + this.httpProps[N.RANGE_GET_CONTENT_MD5] = + httpHeaders["x-ms-range-get-content-md5"]; + this.httpProps[N.DELETE_SNAPSHOTS] = httpHeaders["x-ms-delete-snapshots"]; + this.httpProps[N.LEASE_ID] = httpHeaders["x-ms-lease-id"]; + this.httpProps[N.LEASE_ACTION] = httpHeaders["x-ms-lease-action"] + ? 
httpHeaders["x-ms-lease-action"].toLowerCase() + : undefined; + this.httpProps[N.LEASE_DURATION] = httpHeaders["x-ms-lease-duration"]; + this.httpProps[N.LEASE_BREAK_PERIOD] = + httpHeaders["x-ms-lease-break-period"]; + this.httpProps[N.PROPOSED_LEASE_ID] = httpHeaders["x-ms-proposed-lease-id"]; + this.httpProps[N.IF_MODFIFIED_SINCE] = httpHeaders["if-modified-since"]; + this.httpProps[N.IF_UNMODIFIED_SINCE] = httpHeaders["if-unmodified-since"]; + this.httpProps[N.IF_MATCH] = httpHeaders["if-match"]; + this.httpProps[N.IF_NONE_MATCH] = httpHeaders["if-none-match"]; + this.httpProps[N.SOURCE_IF_MODFIFIED_SINCE] = + httpHeaders["x-ms-source-if-modified-since"]; + this.httpProps[N.SOURCE_IF_UNMODIFIED_SINCE] = + httpHeaders["x-ms-source-if-unmodified-since"]; + this.httpProps[N.SOURCE_IF_MATCH] = httpHeaders["x-ms-source-if-match"]; + this.httpProps[N.SOURCE_IF_NONE_MATCH] = + httpHeaders["x-ms-source-if-none-match"]; + this.httpProps[N.COPY_SOURCE] = httpHeaders["x-ms-copy-source"]; - Object.keys(this.httpProps).forEach((key) => { - if (this.httpProps[key] === undefined || this.httpProps[key] === 'undefined') { - delete this.httpProps[key]; - } - }); - } + // As per spec @ https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl + // if this header is not specified it is set to 'private' per default. 
+ this.httpProps[N.BLOB_PUBLIC_ACCESS] = + httpHeaders["x-ms-blob-public-access"] || "private"; + // Append Blobs specific + this.httpProps[N.BLOB_CONDITION_MAX_SIZE] = + parseInt(httpHeaders["x-ms-blob-condition-maxsize"]) || undefined; + this.httpProps[N.BLOB_CONDITION_APPENDPOS] = + parseInt(httpHeaders["x-ms-blob-condition-appendpos"]) || undefined; + // Page Blobs specific + this.httpProps[N.BLOB_CONTENT_LENGTH] = + httpHeaders["x-ms-blob-content-length"]; + this.httpProps[N.PAGE_WRITE] = httpHeaders["x-ms-page-write"]; + this.httpProps[N.IF_SEQUENCE_NUMBER_LE] = + httpHeaders["x-ms-if-sequence-number-le"]; + this.httpProps[N.IF_SEQUENCE_NUMBER_LT] = + httpHeaders["x-ms-if-sequence-number-lt"]; + this.httpProps[N.IF_SEQUENCE_NUMBER_EQ] = + httpHeaders["x-ms-if-sequence-number-eq"]; + + Object.keys(this.httpProps).forEach((key) => { + if ( + this.httpProps[key] === undefined || + this.httpProps[key] === "undefined" + ) { + delete this.httpProps[key]; + } + }); + } } -module.exports = AzuriteRequest; \ No newline at end of file +module.exports = AzuriteRequest; diff --git a/lib/model/blob/AzuriteResponse.js b/lib/model/blob/AzuriteResponse.js index 4783e5f00..71c4f3cac 100644 --- a/lib/model/blob/AzuriteResponse.js +++ b/lib/model/blob/AzuriteResponse.js @@ -1,56 +1,64 @@ -'use strict'; +/** @format */ -const uuidV1 = require('uuid/v1'), - N = require('./../../core/HttpHeaderNames'), - EntityType = require('./../../core/Constants').StorageEntityType; +"use strict"; + +const uuidV1 = require("uuid/v1"), + N = require("./../../core/HttpHeaderNames"), + EntityType = require("./../../core/Constants").StorageEntityType; class AzuriteResponse { - constructor({ proxy = undefined, payload = undefined, query = {}, cors = undefined } = {}) { - this.httpProps = {}; - this.proxy = proxy; - if (this.proxy) { - this.httpProps[N.ETAG] = `\"${this.proxy.original.etag}\"`; - this.httpProps[N.LAST_MODIFIED] = this.proxy.lastModified(); - 
Object.keys(this.proxy.original.metaProps).forEach((key) => { - this.httpProps[`x-ms-meta-${key}`] = this.proxy.original.metaProps[key]; - }); - - if (proxy.original.entityType === EntityType.AppendBlob) { - this.httpProps[N.BLOB_COMMITTED_BLOCK_COUNT] = proxy.original[N.BLOB_COMMITTED_BLOCK_COUNT]; - this.httpProps[N.BLOB_APPEND_OFFSET] = proxy.original.size; - } - - if (proxy.original.entityType === EntityType.PageBlob) { - this.httpProps[N.SEQUENCE_NUMBER] = proxy.original.sequenceNumber; - } - } - this.httpProps[N.VERSION] = '2016-05-31'; - this.httpProps[N.DATE] = new Date().toGMTString(); - this.httpProps[N.CONTENT_LENGTH] = 0; - this.httpProps[N.REQUEST_ID] = uuidV1(); - this.payload = payload; - - if (cors !== undefined) { - this.httpProps[N.ACCESS_CONTROL_ALLOW_ORIGIN] = cors.origin; - this.httpProps[N.ACCESS_CONTROL_EXPOSE_HEADERS] = cors.exposedHeaders; - this.httpProps[N.ACCESS_CONTROL_ALLOW_CREDENTIALS] = true; - this.httpProps[N.ACCESS_CONTROL_ALLOW_HEADERS] = cors.exposedHeaders; - } + constructor({ + proxy = undefined, + payload = undefined, + query = {}, + cors = undefined, + } = {}) { + this.httpProps = {}; + this.proxy = proxy; + if (this.proxy) { + this.httpProps[N.ETAG] = `\"${this.proxy.original.etag}\"`; + this.httpProps[N.LAST_MODIFIED] = this.proxy.lastModified(); + Object.keys(this.proxy.original.metaProps).forEach((key) => { + this.httpProps[`x-ms-meta-${key}`] = this.proxy.original.metaProps[key]; + }); + + if (proxy.original.entityType === EntityType.AppendBlob) { + this.httpProps[N.BLOB_COMMITTED_BLOCK_COUNT] = + proxy.original[N.BLOB_COMMITTED_BLOCK_COUNT]; + this.httpProps[N.BLOB_APPEND_OFFSET] = proxy.original.size; + } + + if (proxy.original.entityType === EntityType.PageBlob) { + this.httpProps[N.SEQUENCE_NUMBER] = proxy.original.sequenceNumber; + } } + this.httpProps[N.VERSION] = "2016-05-31"; + this.httpProps[N.DATE] = new Date().toGMTString(); + this.httpProps[N.CONTENT_LENGTH] = 0; + this.httpProps[N.REQUEST_ID] = uuidV1(); + 
this.payload = payload; - addHttpProperty(key, value) { - if (value !== undefined) { - this.httpProps[key] = value; - } + if (cors !== undefined) { + this.httpProps[N.ACCESS_CONTROL_ALLOW_ORIGIN] = cors.origin; + this.httpProps[N.ACCESS_CONTROL_EXPOSE_HEADERS] = cors.exposedHeaders; + this.httpProps[N.ACCESS_CONTROL_ALLOW_CREDENTIALS] = true; + this.httpProps[N.ACCESS_CONTROL_ALLOW_HEADERS] = cors.exposedHeaders; } + } - sasOverrideHeaders(query) { - this.addHttpProperty(N.CACHE_CONTROL, query.rscc); - this.addHttpProperty(N.CONTENT_DISPOSITION, query.rscd); - this.addHttpProperty(N.CONTENT_ENCODING, query.rsce); - this.addHttpProperty(N.CONTENT_LANGUAGE, query.rscl); - this.addHttpProperty(N.CONTENT_TYPE, query.rsct); + addHttpProperty(key, value) { + if (value !== undefined) { + this.httpProps[key] = value; } + } + + sasOverrideHeaders(query) { + this.addHttpProperty(N.CACHE_CONTROL, query.rscc); + this.addHttpProperty(N.CONTENT_DISPOSITION, query.rscd); + this.addHttpProperty(N.CONTENT_ENCODING, query.rsce); + this.addHttpProperty(N.CONTENT_LANGUAGE, query.rscl); + this.addHttpProperty(N.CONTENT_TYPE, query.rsct); + } } -module.exports = AzuriteResponse; \ No newline at end of file +module.exports = AzuriteResponse; diff --git a/lib/model/blob/BlobProxy.js b/lib/model/blob/BlobProxy.js index 89ead0c12..8f9556175 100644 --- a/lib/model/blob/BlobProxy.js +++ b/lib/model/blob/BlobProxy.js @@ -1,43 +1,49 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - StorageEntityProxy = require('./StorageEntityProxy'), - etag = require('./../../core/utils').computeEtag, - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; + +const crypto = require("crypto"), + StorageEntityProxy = require("./StorageEntityProxy"), + etag = require("./../../core/utils").computeEtag, + InternalAzuriteError = require("./../../core/InternalAzuriteError"); /** - * Serves as a blob proxy to the corresponding LokiJS object. 
- * + * Serves as a blob proxy to the corresponding LokiJS object. + * * @class BlobProxy */ class BlobProxy extends StorageEntityProxy { - constructor(original, containerName) { - super(original); - if (!containerName) { - throw new InternalAzuriteError('BlobProxy: missing containerName'); - } - this.containerName = containerName; + constructor(original, containerName) { + super(original); + if (!containerName) { + throw new InternalAzuriteError("BlobProxy: missing containerName"); } + this.containerName = containerName; + } - static createFromArray(entities, containerName) { - let array = []; - for (const entity of entities) { - array.push(new BlobProxy(entity, containerName)); - } - return array; + static createFromArray(entities, containerName) { + let array = []; + for (const entity of entities) { + array.push(new BlobProxy(entity, containerName)); } + return array; + } - /** - * Updates and returns the strong ETag of the underlying blob. - * - * @returns - * @memberof BlobProxy - */ - updateETag() { - const etagValue = etag(`${this.lastModified()}${JSON.stringify(this.original.metaProps)}${this.original.id}${this.original.meta.revision}`); - this.original.etag = `${etagValue}`; - return this.original.etag; - } + /** + * Updates and returns the strong ETag of the underlying blob. 
+ * + * @returns + * @memberof BlobProxy + */ + updateETag() { + const etagValue = etag( + `${this.lastModified()}${JSON.stringify(this.original.metaProps)}${ + this.original.id + }${this.original.meta.revision}` + ); + this.original.etag = `${etagValue}`; + return this.original.etag; + } } -module.exports = BlobProxy; \ No newline at end of file +module.exports = BlobProxy; diff --git a/lib/model/blob/ContainerProxy.js b/lib/model/blob/ContainerProxy.js index f1f6968f4..85a857397 100644 --- a/lib/model/blob/ContainerProxy.js +++ b/lib/model/blob/ContainerProxy.js @@ -1,31 +1,36 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - etag = require('./../../core/utils').computeEtag, - StorageEntityProxy = require('./StorageEntityProxy'); +"use strict"; +const crypto = require("crypto"), + etag = require("./../../core/utils").computeEtag, + StorageEntityProxy = require("./StorageEntityProxy"); /** - * Serves as a container proxy to the corresponding LokiJS object. - * + * Serves as a container proxy to the corresponding LokiJS object. + * * @class ContainerProxy */ class ContainerProxy extends StorageEntityProxy { - constructor(original) { - super(original); - } + constructor(original) { + super(original); + } - /** - * Updates and returns the strong ETag of the underlying container. - * - * @returns - * @memberof ContainerProxy - */ - updateETag() { - const etagValue = etag(`${this.lastModified()}${JSON.stringify(this.original.metaProps)}${this.original.name}${this.original.meta.revision}`); - this.original.etag = `${etagValue}`; - return this.original.etag; - } + /** + * Updates and returns the strong ETag of the underlying container. 
+ * + * @returns + * @memberof ContainerProxy + */ + updateETag() { + const etagValue = etag( + `${this.lastModified()}${JSON.stringify(this.original.metaProps)}${ + this.original.name + }${this.original.meta.revision}` + ); + this.original.etag = `${etagValue}`; + return this.original.etag; + } } -module.exports = ContainerProxy; \ No newline at end of file +module.exports = ContainerProxy; diff --git a/lib/model/blob/StorageEntityGenerator.js b/lib/model/blob/StorageEntityGenerator.js index 0760807c8..ded7a1fad 100644 --- a/lib/model/blob/StorageEntityGenerator.js +++ b/lib/model/blob/StorageEntityGenerator.js @@ -1,121 +1,174 @@ -'use strict'; +/** @format */ -const EntityType = require('./../../core/Constants').StorageEntityType, - N = require('./../../core/HttpHeaderNames'), - etag = require('./../../core/utils').computeEtag; +"use strict"; + +const EntityType = require("./../../core/Constants").StorageEntityType, + N = require("./../../core/HttpHeaderNames"), + etag = require("./../../core/utils").computeEtag; /** * Generates an according Storage Entity (@type Container or @type Blob) out of a @ref AzuriteRequest object. 
- * + * * @class StorageEntityGenerator */ class StorageEntityGenerator { - constructor() { - } + constructor() {} - /** - * Generates a persistable storage entity respresentation based on a @type AzuriteRequest object - * - * @returns - * @memberof StorageEntityGenerator - */ - generateStorageEntity(request) { - const entity = {}; - // Common to all entities (containers and blobs) - entity.metaProps = request.metaProps; - entity.entityType = request.entityType; - entity.leaseState = 'available'; - entity.access = 'private'; + /** + * Generates a persistable storage entity respresentation based on a @type AzuriteRequest object + * + * @returns + * @memberof StorageEntityGenerator + */ + generateStorageEntity(request) { + const entity = {}; + // Common to all entities (containers and blobs) + entity.metaProps = request.metaProps; + entity.entityType = request.entityType; + entity.leaseState = "available"; + entity.access = "private"; - if (request.entityType === EntityType.Container) { - entity.name = request.containerName; - entity.access = request.httpProps[N.BLOB_PUBLIC_ACCESS]; - entity.etag = etag(`${Date.parse(new Date())}${JSON.stringify(entity.metaProps)}${request.containerName}`); - } else { - // Common to all blobs - entity.name = request.blobName; - entity.id = request.id; - // Parent ID refers to the blob a block belongs to - entity.parentId = request.parentId; entity.parentId === undefined ? delete entity.parentId : (() => {/*NOOP*/ }); - // Origin ID refers to the blob a snapshot belongs to - entity.originId = request.originId; entity.originId === undefined ? delete entity.originId : (() => {/*NOOP*/ }); - entity.uri = request.uri; - entity.snapshot = false; - entity.committed = request.commit; // this is true per default - entity.md5 = request.httpProps[N.CONTENT_MD5] || request.calculateContentMd5(); - entity.size = request.body ? 
request.body.length : 0; - entity.etag = etag(`${Date.parse(new Date())}${JSON.stringify(entity.metaProps)}${request.id}`); - // The following attributes are deleted if they are undefined - entity.cacheControl = request.httpProps[N.CACHE_CONTROL]; entity.cacheControl === undefined ? delete entity.cacheControl : (() => {/*NOOP*/ }); - entity.contentType = request.httpProps[N.CONTENT_TYPE]; entity.contentType === undefined ? delete entity.contentType : (() => {/*NOOP*/ }); - entity.contentEncoding = request.httpProps[N.CONTENT_ENCODING]; entity.contentEncoding === undefined ? delete entity.contentEncoding : (() => {/*NOOP*/ }); - entity.contentLanguage = request.httpProps[N.CONTENT_LANGUAGE]; entity.contentLanguage === undefined ? delete entity.contentLanguage : (() => {/*NOOP*/ }); - entity.contentDisposition = request.httpProps[N.CONTENT_DISPOSITION]; entity.contentDisposition === undefined ? delete entity.contentDisposition : (() => {/*NOOP*/ }); - entity.md5 = request.httpProps[N.CONTENT_MD5]; entity.md5 === undefined ? delete entity.md5 : (() => {/*NOOP*/ }); - } - // Specific to Append Blobs - if (request.entityType === EntityType.AppendBlob) { - entity[N.BLOB_COMMITTED_BLOCK_COUNT] = 0; - // According to https://docs.microsoft.com/en-us/rest/api/storageservices/append-block the MD5 hash which is - // optionally set in Content-MD5 header is not stored with the blob, thus we delete it. 
- delete entity.md5; - } - // Specific to Block Blobs that are potentially part of a commit - else if (request.entityType === EntityType.BlockBlob && request.blockId !== undefined) { - entity.blockId = request.blockId; - // entity.parent = `${request.containerName}-${request.blobName}`; - // entity.name = `${entity.parent}-${entity.blockId}`; - entity.committed = false; - } - // Specific to Page Blobs - else if (request.entityType === EntityType.PageBlob) { - entity.size = request.httpProps[N.BLOB_CONTENT_LENGTH]; - entity.sequenceNumber = 0; - // MD5 calculation of a page blob seems to be wrong, thus deleting it for now... - delete entity.md5; - } - return entity; + if (request.entityType === EntityType.Container) { + entity.name = request.containerName; + entity.access = request.httpProps[N.BLOB_PUBLIC_ACCESS]; + entity.etag = etag( + `${Date.parse(new Date())}${JSON.stringify(entity.metaProps)}${ + request.containerName + }` + ); + } else { + // Common to all blobs + entity.name = request.blobName; + entity.id = request.id; + // Parent ID refers to the blob a block belongs to + entity.parentId = request.parentId; + entity.parentId === undefined + ? delete entity.parentId + : () => { + /*NOOP*/ + }; + // Origin ID refers to the blob a snapshot belongs to + entity.originId = request.originId; + entity.originId === undefined + ? delete entity.originId + : () => { + /*NOOP*/ + }; + entity.uri = request.uri; + entity.snapshot = false; + entity.committed = request.commit; // this is true per default + entity.md5 = + request.httpProps[N.CONTENT_MD5] || request.calculateContentMd5(); + entity.size = request.body ? request.body.length : 0; + entity.etag = etag( + `${Date.parse(new Date())}${JSON.stringify(entity.metaProps)}${ + request.id + }` + ); + // The following attributes are deleted if they are undefined + entity.cacheControl = request.httpProps[N.CACHE_CONTROL]; + entity.cacheControl === undefined + ? 
delete entity.cacheControl + : () => { + /*NOOP*/ + }; + entity.contentType = request.httpProps[N.CONTENT_TYPE]; + entity.contentType === undefined + ? delete entity.contentType + : () => { + /*NOOP*/ + }; + entity.contentEncoding = request.httpProps[N.CONTENT_ENCODING]; + entity.contentEncoding === undefined + ? delete entity.contentEncoding + : () => { + /*NOOP*/ + }; + entity.contentLanguage = request.httpProps[N.CONTENT_LANGUAGE]; + entity.contentLanguage === undefined + ? delete entity.contentLanguage + : () => { + /*NOOP*/ + }; + entity.contentDisposition = request.httpProps[N.CONTENT_DISPOSITION]; + entity.contentDisposition === undefined + ? delete entity.contentDisposition + : () => { + /*NOOP*/ + }; + entity.md5 = request.httpProps[N.CONTENT_MD5]; + entity.md5 === undefined + ? delete entity.md5 + : () => { + /*NOOP*/ + }; + } + // Specific to Append Blobs + if (request.entityType === EntityType.AppendBlob) { + entity[N.BLOB_COMMITTED_BLOCK_COUNT] = 0; + // According to https://docs.microsoft.com/en-us/rest/api/storageservices/append-block the MD5 hash which is + // optionally set in Content-MD5 header is not stored with the blob, thus we delete it. + delete entity.md5; + } + // Specific to Block Blobs that are potentially part of a commit + else if ( + request.entityType === EntityType.BlockBlob && + request.blockId !== undefined + ) { + entity.blockId = request.blockId; + // entity.parent = `${request.containerName}-${request.blobName}`; + // entity.name = `${entity.parent}-${entity.blockId}`; + entity.committed = false; + } + // Specific to Page Blobs + else if (request.entityType === EntityType.PageBlob) { + entity.size = request.httpProps[N.BLOB_CONTENT_LENGTH]; + entity.sequenceNumber = 0; + // MD5 calculation of a page blob seems to be wrong, thus deleting it for now... 
+ delete entity.md5; } + return entity; + } - clone(o) { - const copy = {}; - copy.metaProps = o.metaProps; - copy.entityType = o.entityType; - copy.leaseState = o.leaseState; - copy.access = o.access; - copy.name = o.name; - copy.etag = o.etag; - if (o.entityType !== EntityType.Container) { - copy.snapshot = o.snapshot; - copy.committed = o.committed; - copy.md5 = o.md5; - copy.size = o.size; - if (o.cacheControl) { - copy.cacheControl = o.cacheControl; - } - if (o.contentType) { - copy.contentType = o.contentType; - } - if (o.contentEncoding) { - copy.contentEncoding = o.contentEncoding; - } - if (o.contentLanguage) { - copy.contentLanguage = o.contentLanguage; - } - if (o.entityType === EntityType.AppendBlob) { - copy[N.BLOB_COMMITTED_BLOCK_COUNT] = o[N.BLOB_COMMITTED_BLOCK_COUNT]; - } - if (o.entityType === EntityType.BlockBlob && o.blockId !== null) { - copy.blockId = o.blockId; - copy.parent = o.parent; - } - if (o.entityType === EntityType.PageBlob) { - copy.size = o.size - copy.sequenceNumber = o.sequenceNumber; - } - } - return copy; + clone(o) { + const copy = {}; + copy.metaProps = o.metaProps; + copy.entityType = o.entityType; + copy.leaseState = o.leaseState; + copy.access = o.access; + copy.name = o.name; + copy.etag = o.etag; + if (o.entityType !== EntityType.Container) { + copy.snapshot = o.snapshot; + copy.committed = o.committed; + copy.md5 = o.md5; + copy.size = o.size; + if (o.cacheControl) { + copy.cacheControl = o.cacheControl; + } + if (o.contentType) { + copy.contentType = o.contentType; + } + if (o.contentEncoding) { + copy.contentEncoding = o.contentEncoding; + } + if (o.contentLanguage) { + copy.contentLanguage = o.contentLanguage; + } + if (o.entityType === EntityType.AppendBlob) { + copy[N.BLOB_COMMITTED_BLOCK_COUNT] = o[N.BLOB_COMMITTED_BLOCK_COUNT]; + } + if (o.entityType === EntityType.BlockBlob && o.blockId !== null) { + copy.blockId = o.blockId; + copy.parent = o.parent; + } + if (o.entityType === EntityType.PageBlob) { + copy.size 
= o.size; + copy.sequenceNumber = o.sequenceNumber; + } } + return copy; + } } -module.exports = new StorageEntityGenerator(); \ No newline at end of file +module.exports = new StorageEntityGenerator(); diff --git a/lib/model/blob/StorageEntityProxy.js b/lib/model/blob/StorageEntityProxy.js index 204b0c8a1..2f8ada81c 100644 --- a/lib/model/blob/StorageEntityProxy.js +++ b/lib/model/blob/StorageEntityProxy.js @@ -1,72 +1,78 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - BbPromise = require('bluebird'), - fs = BbPromise.promisifyAll(require("fs-extra")), - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; + +const crypto = require("crypto"), + BbPromise = require("bluebird"), + fs = BbPromise.promisifyAll(require("fs-extra")), + InternalAzuriteError = require("./../../core/InternalAzuriteError"); /** * DO NOT INSTANTIATE. - * Serves as the base class proxy to the corresponding LokiJS object, which could be either a container or a blob. - * + * Serves as the base class proxy to the corresponding LokiJS object, which could be either a container or a blob. + * * @class StorageEntityProxy */ class StorageEntityProxy { - constructor(original) { - if (!original) { - throw new InternalAzuriteError('StorageEntityProxy: missing original'); - } - this.original = original; + constructor(original) { + if (!original) { + throw new InternalAzuriteError("StorageEntityProxy: missing original"); } + this.original = original; + } - release() { - this.updateLeaseState(); - this.updateETag(); - return this.original; - } + release() { + this.updateLeaseState(); + this.updateETag(); + return this.original; + } - /** - * Updates and returns the lease state of the storage item based on its internal state. - * Changes to the underlying LokiJS object are automatically persisted by LokiJS. 
- * - * @returns - * @memberof StorageEntityProxy - */ - updateLeaseState() { - const now = Date.now(); - switch (this.original.leaseState) { - // Has breaking period expired? - case 'breaking': - this.original.leaseState = (this.original.leaseBrokenAt <= now) ? 'broken' : 'breaking'; - break; - // Has lease expired? - case 'leased': - // Infinite Lease - if (this.original.leaseExpiredAt === -1) { - this.original.leaseState = 'leased'; - } else { - this.original.leaseState = (this.original.leaseExpiredAt <= now) ? 'expired' : 'leased'; - } - break; - default: - this.original.leaseState = this.original.leaseState || 'available'; + /** + * Updates and returns the lease state of the storage item based on its internal state. + * Changes to the underlying LokiJS object are automatically persisted by LokiJS. + * + * @returns + * @memberof StorageEntityProxy + */ + updateLeaseState() { + const now = Date.now(); + switch (this.original.leaseState) { + // Has breaking period expired? + case "breaking": + this.original.leaseState = + this.original.leaseBrokenAt <= now ? "broken" : "breaking"; + break; + // Has lease expired? + case "leased": + // Infinite Lease + if (this.original.leaseExpiredAt === -1) { + this.original.leaseState = "leased"; + } else { + this.original.leaseState = + this.original.leaseExpiredAt <= now ? "expired" : "leased"; } - return this.original.leaseState; + break; + default: + this.original.leaseState = this.original.leaseState || "available"; } + return this.original.leaseState; + } - updateETag() { - throw new InternalAzuriteError('updateETag not implemented!'); - } + updateETag() { + throw new InternalAzuriteError("updateETag not implemented!"); + } - /** - * Returns the date and time the storage entity was last modified. The date format follows RFC 1123. 
- * - * @returns - * @memberof StorageEntityProxy - */ - lastModified() { - return new Date(this.original.meta.updated || this.original.meta.created).toUTCString(); - } + /** + * Returns the date and time the storage entity was last modified. The date format follows RFC 1123. + * + * @returns + * @memberof StorageEntityProxy + */ + lastModified() { + return new Date( + this.original.meta.updated || this.original.meta.created + ).toUTCString(); + } } -module.exports = StorageEntityProxy; \ No newline at end of file +module.exports = StorageEntityProxy; diff --git a/lib/model/queue/AzuriteQueueRequest.js b/lib/model/queue/AzuriteQueueRequest.js index f47da0fe9..a0ed80c98 100644 --- a/lib/model/queue/AzuriteQueueRequest.js +++ b/lib/model/queue/AzuriteQueueRequest.js @@ -1,59 +1,63 @@ -'use strict'; +/** @format */ -const Operations = require('./../../core/Constants').Operations.Queue; +"use strict"; -class AzuriteQueueRequest { - constructor({ - req = undefined, - payload = undefined, - operation = undefined }) { - - if (req === undefined) { - throw new InternalAzuriteError('AzuriteQueueRequest: req must not be undefined!'); - } - - this.queueName = req.params.queue; - this.messageId = req.params.messageId || undefined; - this.metaProps = {}; - this.query = req.query; - this.bodyLength = req.body ? req.body.length : 0; - this.now = Date.parse(new Date()) / 1000; // current time in seconds - this.payload = payload; - this.numOfMessages = parseInt(req.query.numofmessages) || 1; - switch (operation) { - case Operations.PUT_MESSAGE: - this.visibilityTimeout = parseInt(req.query.visibilitytimeout) || 0; - break;tmp - case Operations.GET_MESSAGE: - const tmp = parseInt(req.query.visibilitytimeout); - this.visibilityTimeout = (!tmp) ? 
30 : tmp; - break; - default: - this.visibilityTimeout = parseInt(req.query.visibilitytimeout) || 0; - } - this.messageTtl = parseInt(req.query.messagettl) || 60 * 60 * 24 * 7; // 7 days in seconds - this.popReceipt = req.query.popreceipt || undefined; +const Operations = require("./../../core/Constants").Operations.Queue; - this._initMetaProps(req.rawHeaders); +class AzuriteQueueRequest { + constructor({ req = undefined, payload = undefined, operation = undefined }) { + if (req === undefined) { + throw new InternalAzuriteError( + "AzuriteQueueRequest: req must not be undefined!" + ); } - // Working on rawHeaders for meta attributes to preserve casing. - _initMetaProps(rawHeaders) { - this.metaProps = rawHeaders.map((e, i, a) => { - if (e.indexOf('x-ms-meta-') !== -1) { - e = e.replace('x-ms-meta-', ''); - const o = {}; - o[e] = a[i + 1]; - return o; - } - }).filter((e) => { - return e !== undefined; - }).reduce((acc, e) => { - const key = Object.keys(e)[0]; - acc[key] = e[key]; - return acc; - }, {}); + this.queueName = req.params.queue; + this.messageId = req.params.messageId || undefined; + this.metaProps = {}; + this.query = req.query; + this.bodyLength = req.body ? req.body.length : 0; + this.now = Date.parse(new Date()) / 1000; // current time in seconds + this.payload = payload; + this.numOfMessages = parseInt(req.query.numofmessages) || 1; + switch (operation) { + case Operations.PUT_MESSAGE: + this.visibilityTimeout = parseInt(req.query.visibilitytimeout) || 0; + break; + tmp; + case Operations.GET_MESSAGE: + const tmp = parseInt(req.query.visibilitytimeout); + this.visibilityTimeout = !tmp ? 30 : tmp; + break; + default: + this.visibilityTimeout = parseInt(req.query.visibilitytimeout) || 0; } + this.messageTtl = parseInt(req.query.messagettl) || 60 * 60 * 24 * 7; // 7 days in seconds + this.popReceipt = req.query.popreceipt || undefined; + + this._initMetaProps(req.rawHeaders); + } + + // Working on rawHeaders for meta attributes to preserve casing. 
+ _initMetaProps(rawHeaders) { + this.metaProps = rawHeaders + .map((e, i, a) => { + if (e.indexOf("x-ms-meta-") !== -1) { + e = e.replace("x-ms-meta-", ""); + const o = {}; + o[e] = a[i + 1]; + return o; + } + }) + .filter((e) => { + return e !== undefined; + }) + .reduce((acc, e) => { + const key = Object.keys(e)[0]; + acc[key] = e[key]; + return acc; + }, {}); + } } -module.exports = AzuriteQueueRequest; \ No newline at end of file +module.exports = AzuriteQueueRequest; diff --git a/lib/model/queue/AzuriteQueueResponse.js b/lib/model/queue/AzuriteQueueResponse.js index 6b5b828e5..0787188d5 100644 --- a/lib/model/queue/AzuriteQueueResponse.js +++ b/lib/model/queue/AzuriteQueueResponse.js @@ -1,27 +1,29 @@ -'use strict'; +/** @format */ -const uuidV1 = require('uuid/v1'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const uuidV1 = require("uuid/v1"), + N = require("./../../core/HttpHeaderNames"); class AzuriteQueueResponse { - constructor() { - this.httpProps = {}; - this.httpProps[N.VERSION] = '2016-05-31'; - this.httpProps[N.DATE] = new Date().toGMTString(); - this.httpProps[N.REQUEST_ID] = uuidV1(); - } + constructor() { + this.httpProps = {}; + this.httpProps[N.VERSION] = "2016-05-31"; + this.httpProps[N.DATE] = new Date().toGMTString(); + this.httpProps[N.REQUEST_ID] = uuidV1(); + } - addHttpProperty(key, value) { - if (value !== undefined) { - this.httpProps[key] = value; - } + addHttpProperty(key, value) { + if (value !== undefined) { + this.httpProps[key] = value; } + } - addMetaProps(metaProps) { - Object.keys(metaProps).forEach((key) => { - this.addHttpProperty(`x-ms-meta-${key}`, metaProps[key]); - }); - } + addMetaProps(metaProps) { + Object.keys(metaProps).forEach((key) => { + this.addHttpProperty(`x-ms-meta-${key}`, metaProps[key]); + }); + } } module.exports = AzuriteQueueResponse; diff --git a/lib/model/queue/Message.js b/lib/model/queue/Message.js index 09443ff2c..1e703409a 100644 --- a/lib/model/queue/Message.js +++ 
b/lib/model/queue/Message.js @@ -1,46 +1,48 @@ -'use strict' +/** @format */ -const uuidv4 = require('uuid/v4'); +"use strict"; + +const uuidv4 = require("uuid/v4"); /** * Abstraction of a queue message. - * + * * @class Message */ class Message { - /** - * Creates an instance of message. - * @param {String} msg the queue message. - * @param {any} visibilityTimeout defines the time interval it is not visible to other clients - * after it has been retrieved - * @param {any} messageTtl time to live of the message - * @memberof Item - */ - constructor(now, msg, visibilityTimeout, messageTtl) { - this.msg = msg; - this.expirationTime = now + messageTtl; - this.visibilityTimeout = visibilityTimeout; - this.timeNextVisible = now + visibilityTimeout; - this.messageId = uuidv4(); - this.insertionTime = now; - this.popReceipt = uuidv4(); - this.dequeueCount = 0; - } + /** + * Creates an instance of message. + * @param {String} msg the queue message. + * @param {any} visibilityTimeout defines the time interval it is not visible to other clients + * after it has been retrieved + * @param {any} messageTtl time to live of the message + * @memberof Item + */ + constructor(now, msg, visibilityTimeout, messageTtl) { + this.msg = msg; + this.expirationTime = now + messageTtl; + this.visibilityTimeout = visibilityTimeout; + this.timeNextVisible = now + visibilityTimeout; + this.messageId = uuidv4(); + this.insertionTime = now; + this.popReceipt = uuidv4(); + this.dequeueCount = 0; + } - renewPopReceipt() { - this.popReceipt = uuidv4(); - } + renewPopReceipt() { + this.popReceipt = uuidv4(); + } - visible() { - const now = Date.parse(new Date()) / 1000; - return this.timeNextVisible === undefined || now >= this.timeNextVisible; - } + visible() { + const now = Date.parse(new Date()) / 1000; + return this.timeNextVisible === undefined || now >= this.timeNextVisible; + } - updateVisibilityTimeout(visibilityTimeout) { - this.visibilityTimeout = visibilityTimeout; - const now = 
Date.parse(new Date()) / 1000; - this.timeNextVisible = now + this.visibilityTimeout; - } + updateVisibilityTimeout(visibilityTimeout) { + this.visibilityTimeout = visibilityTimeout; + const now = Date.parse(new Date()) / 1000; + this.timeNextVisible = now + this.visibilityTimeout; + } } -module.exports = Message; \ No newline at end of file +module.exports = Message; diff --git a/lib/model/queue/Queue.js b/lib/model/queue/Queue.js index 515731ee1..b1f1eddcc 100644 --- a/lib/model/queue/Queue.js +++ b/lib/model/queue/Queue.js @@ -1,131 +1,144 @@ -'use strict'; +/** @format */ -const Message = require('./Message'), - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; -class Queue { - constructor(metaProps = {}) { - this.metaProps = metaProps; - this.messages = []; - } +const Message = require("./Message"), + InternalAzuriteError = require("./../../core/InternalAzuriteError"); - put({ now, msg, visibilityTimeout, messageTtl }) { - const message = new Message(now, msg, visibilityTimeout, messageTtl); - this.messages.push(message); - return message; - } +class Queue { + constructor(metaProps = {}) { + this.metaProps = metaProps; + this.messages = []; + } - gett({ numOfMessages = 1, visibilityTimeout = 30 }) { - const visibleItems = this.messages - .filter((i) => { return i.visible() }) - .slice(0, numOfMessages) - .map((i) => { - ++i.dequeueCount; - if (i.dequeueCount > 1) { // popreceipt is already been set initially - i.renewPopReceipt(); - } - i.updateVisibilityTimeout(visibilityTimeout) - return i; - }); - return visibleItems; - } + put({ now, msg, visibilityTimeout, messageTtl }) { + const message = new Message(now, msg, visibilityTimeout, messageTtl); + this.messages.push(message); + return message; + } - peek(numOfMessages = 1) { - const visibleItems = this.messages - .filter((i) => { return i.visible() }) - .slice(0, numOfMessages) - .map((i) => { - return i; - }); - return visibleItems; - } + gett({ numOfMessages = 1, 
visibilityTimeout = 30 }) { + const visibleItems = this.messages + .filter((i) => { + return i.visible(); + }) + .slice(0, numOfMessages) + .map((i) => { + ++i.dequeueCount; + if (i.dequeueCount > 1) { + // popreceipt is already been set initially + i.renewPopReceipt(); + } + i.updateVisibilityTimeout(visibilityTimeout); + return i; + }); + return visibleItems; + } - /** - * The Delete Message operation deletes the specified message. Since validity of @param popReceipt is validated - * in the queue emulators validation middleware, we assume that it is valid (otherwise it throws @/// ). - * - * @param {any} messageId - * @param {any} popReceipt - * @memberof Queue - */ - delete(messageId, popReceipt) { - const { index } = this._getMessageAndIndex(messageId, popReceipt); - this.messages.splice(index, 1); - } + peek(numOfMessages = 1) { + const visibleItems = this.messages + .filter((i) => { + return i.visible(); + }) + .slice(0, numOfMessages) + .map((i) => { + return i; + }); + return visibleItems; + } - /** - * The Clear Messages operation deletes all messages from the queue. - * - * @memberof Queue - */ - clear() { - this.messages = []; - } + /** + * The Delete Message operation deletes the specified message. Since validity of @param popReceipt is validated + * in the queue emulators validation middleware, we assume that it is valid (otherwise it throws @/// ). + * + * @param {any} messageId + * @param {any} popReceipt + * @memberof Queue + */ + delete(messageId, popReceipt) { + const { index } = this._getMessageAndIndex(messageId, popReceipt); + this.messages.splice(index, 1); + } - /** - * The Update Message operation updates the visibility timeout of a message, and the contents of a message. 
- * - * @param {any} messageId - * @param {any} popReceipt - * @param {any} visibilityTimeout - * @param {any} msg - * @memberof Queue - */ - update({ messageId, popReceipt, visibilityTimeout, msg }) { - const { item } = this._getMessageAndIndex(messageId, popReceipt); - item.updateVisibilityTimeout(visibilityTimeout); - item.renewPopReceipt(); - if (msg !== undefined) { - item.msg = msg; - } - return item; - } + /** + * The Clear Messages operation deletes all messages from the queue. + * + * @memberof Queue + */ + clear() { + this.messages = []; + } - /** - * Returns the message with the specified messageId. - * - * @param {any} messageId - * @returns the according message, undefined if it does not exist. - * @memberof Queue - */ - getMessage(messageId) { - const index = this.messages.findIndex((i) => { - return i.messageId === messageId; - }); - return this.messages[index]; + /** + * The Update Message operation updates the visibility timeout of a message, and the contents of a message. + * + * @param {any} messageId + * @param {any} popReceipt + * @param {any} visibilityTimeout + * @param {any} msg + * @memberof Queue + */ + update({ messageId, popReceipt, visibilityTimeout, msg }) { + const { item } = this._getMessageAndIndex(messageId, popReceipt); + item.updateVisibilityTimeout(visibilityTimeout); + item.renewPopReceipt(); + if (msg !== undefined) { + item.msg = msg; } + return item; + } - addAcl(signedIdentifiers) { - this.signedIdentifiers = signedIdentifiers; - } + /** + * Returns the message with the specified messageId. + * + * @param {any} messageId + * @returns the according message, undefined if it does not exist. 
+ * @memberof Queue + */ + getMessage(messageId) { + const index = this.messages.findIndex((i) => { + return i.messageId === messageId; + }); + return this.messages[index]; + } - getAcl() { - return this.signedIdentifiers; - } + addAcl(signedIdentifiers) { + this.signedIdentifiers = signedIdentifiers; + } - getLength() { - return this.messages.length; - } + getAcl() { + return this.signedIdentifiers; + } - _getMessageAndIndex(messageId, popReceipt) { - const index = this.messages.findIndex((i) => { - return i.messageId === messageId; - }); - // This should never happen due to preceding validation pipeline - if (index === -1) { - throw new InternalAzuriteError(`Queue: item with id [${messageId}] was unexpectedly not found.`); - } - const item = this.messages[index]; - // This should never happen due to preceding validation pipeline - if (item.popReceipt !== popReceipt) { - throw new InternalAzuriteError(`Queue: passed popReceipt [${popReceipt}] is unexpectedly different from item's popReceipt [${item.popReceipt}]`); - } + getLength() { + return this.messages.length; + } - return { - item: item, - index: index - }; + _getMessageAndIndex(messageId, popReceipt) { + const index = this.messages.findIndex((i) => { + return i.messageId === messageId; + }); + // This should never happen due to preceding validation pipeline + if (index === -1) { + throw new InternalAzuriteError( + `Queue: item with id [${messageId}] was unexpectedly not found.` + ); } + const item = this.messages[index]; + // This should never happen due to preceding validation pipeline + if (item.popReceipt !== popReceipt) { + throw new InternalAzuriteError( + `Queue: passed popReceipt [${popReceipt}] is unexpectedly different from item's popReceipt [${ + item.popReceipt + }]` + ); + } + + return { + item: item, + index: index, + }; + } } module.exports = Queue; diff --git a/lib/model/table/AzuriteTableRequest.js b/lib/model/table/AzuriteTableRequest.js index 82856daf8..81be13ef3 100644 --- 
a/lib/model/table/AzuriteTableRequest.js +++ b/lib/model/table/AzuriteTableRequest.js @@ -1,124 +1,164 @@ -'use strict'; +/** @format */ -const InternalAzuriteError = require('./../../core/InternalAzuriteError'), - RequestPayloadParser = require('./RequestPayloadParser'), - Constants = require('./../../core/Constants'), - ODataMode = require('./../../core/Constants').ODataMode, - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const InternalAzuriteError = require("./../../core/InternalAzuriteError"), + RequestPayloadParser = require("./RequestPayloadParser"), + Constants = require("./../../core/Constants"), + ODataMode = require("./../../core/Constants").ODataMode, + N = require("./../../core/HttpHeaderNames"); class AzuriteTableRequest { - constructor({ - req = undefined, - payload = undefined }) { + constructor({ req = undefined, payload = undefined }) { + if (req === undefined) { + throw new InternalAzuriteError( + "AzuriteTableRequest: req must not be undefined!" + ); + } + this.httpProps = {}; + this._initHttpProps(req.headers); + this.accept = this._parseAccept(this.httpProps[N.ACCEPT]) || ODataMode.NONE; + this.payload = RequestPayloadParser.parse( + this.httpProps[N.CONTENT_TYPE], + req.body + ); - if (req === undefined) { - throw new InternalAzuriteError('AzuriteTableRequest: req must not be undefined!'); - } - this.httpProps = {}; - this._initHttpProps(req.headers); - this.accept = this._parseAccept(this.httpProps[N.ACCEPT]) || ODataMode.NONE; - this.payload = RequestPayloadParser.parse(this.httpProps[N.CONTENT_TYPE], req.body); + this.tableName = + this.payload.TableName || + req.params[0].replace(/[\('\)]/g, "") || + undefined; - this.tableName = this.payload.TableName || req.params[0].replace(/[\('\)]/g, '') || undefined; + const res = this._parseEntityKeys(req.params[1] || ""), + partitionKey = res.partitionKey, + rowKey = res.rowKey; + this.partitionKey = this.payload.PartitionKey || partitionKey; + this.rowKey = this.payload.RowKey || 
rowKey; - const res = this._parseEntityKeys(req.params[1] || ''), - partitionKey = res.partitionKey, - rowKey = res.rowKey; - this.partitionKey = this.payload.PartitionKey || partitionKey; - this.rowKey = this.payload.RowKey || rowKey; + //If PartitionKey and Rowkey is passed in header a single entity is to be retireived + if (this.partitionKey && this.rowKey) { + this.singleEntity = true; + } - this.filter = req.query.$filter ? this._mapFilterQueryString(decodeURI(req.query.$filter)) : undefined; - // Maximum of 1000 items at one time are allowed, - // see https://docs.microsoft.com/rest/api/storageservices/query-timeout-and-pagination - this.top = req.query.$top || 1000; + this.filter = req.query.$filter + ? this._mapFilterQueryString(decodeURI(req.query.$filter)) + : undefined; + // Maximum of 1000 items at one time are allowed, + // see https://docs.microsoft.com/rest/api/storageservices/query-timeout-and-pagination - if (Object.keys(this.payload).length === 0 && this.payload.constructor === Object) { - this.payload === undefined; - } - } + this.top = req.query.$top || 1000; - _initHttpProps(httpHeaders) { - this.httpProps[N.DATE] = httpHeaders[N.DATE] || `x-ms-date`; - this.httpProps[N.CONTENT_TYPE] = httpHeaders[N.CONTENT_TYPE] || `application/json`; - this.httpProps[N.ACCEPT] = httpHeaders[N.ACCEPT] || `application/json;odata=nometadata`; - this.httpProps[N.PREFER] = httpHeaders[N.PREFER] || `return-content`; - this.httpProps[N.IF_MATCH] = httpHeaders[N.IF_MATCH]; + if ( + Object.keys(this.payload).length === 0 && + this.payload.constructor === Object + ) { + this.payload === undefined; } + } - _parseAccept(value) { - if (value === undefined) return undefined; - if (value.includes(`odata=nometadata`)) return Constants.ODataMode.NONE; - if (value.includes(`odata=minimalmetadata`)) return Constants.ODataMode.MINIMAL; - if (value.includes(`odata=fullmetadata`)) return Constants.ODataMode.FULL; - } + _initHttpProps(httpHeaders) { + this.httpProps[N.DATE] = 
httpHeaders[N.DATE] + ? httpHeaders[N.DATE] + : httpHeaders["x-ms-date"]; + this.httpProps[N.CONTENT_TYPE] = + httpHeaders[N.CONTENT_TYPE] || `application/json`; + this.httpProps[N.ACCEPT] = + httpHeaders[N.ACCEPT] || `application/json;odata=nometadata`; + this.httpProps[N.PREFER] = httpHeaders[N.PREFER] || `return-content`; + this.httpProps[N.IF_MATCH] = httpHeaders[N.IF_MATCH]; + } - _parseEntityKeys(str) { - const empty = { - partitionKey: undefined, - rowKey: undefined - }; - if (str === '') { - return empty; - } - const regex = new RegExp(/\(PartitionKey='(.*)',\s*RowKey='(.*)'\)/); - const res = regex.exec(str); - if (res === null) { - return empty; - } - return { - partitionKey: res[1], - rowKey: res[2] - }; + _parseAccept(value) { + if (value === undefined) return undefined; + if (value.includes(`odata=nometadata`)) return Constants.ODataMode.NONE; + if (value.includes(`odata=minimalmetadata`)) + return Constants.ODataMode.MINIMAL; + if (value.includes(`odata=fullmetadata`)) return Constants.ODataMode.FULL; + } + + _parseEntityKeys(str) { + const empty = { + partitionKey: undefined, + rowKey: undefined, + }; + if (str === "") { + return empty; + } + const regex = new RegExp(/\(PartitionKey='(.*)',\s*RowKey='(.*)'\)/); + const res = regex.exec(str); + if (res === null) { + return empty; } + return { + partitionKey: res[1], + rowKey: res[2], + }; + } - _mapFilterQueryString(filter) { - filter = filter - // ignoring these query keywords since we compare simply on a string-level - .replace(/\bdatetime\b/g, '') - .replace(/\bguid\b/g, '') - // A simple quotation mark is escaped with another one (i.e. ''). 
- // Since we will evaluate this string we replace simple quotation marks indictaing strings with template quotation marks - .replace(/''/g, '@') - .replace(/'/g, '`') - .replace(/@/g, `'`) - // Mapping 'TableName' to 'name' which is used internally as attribute name - .replace(/\bTableName\b/g, 'name') - // Mapping operators - .replace(/\beq\b/g, '===') - .replace(/\bgt\b/g, '>') - .replace(/\bge\b/g, '>=') - .replace(/\blt\b/g, '<') - .replace(/\ble\b/g, '<=') - .replace(/\bne\b/g, '!==') - .replace(/\band\b/g, '&&') - .replace(/\bor\b/g, '||') - .replace(/\(/g, ' ( ') - .replace(/\)/g, ' ) ') - .replace(/\bnot\b/g, ' ! '); + _mapFilterQueryString(filter) { + filter = filter + // ignoring these query keywords since we compare simply on a string-level + .replace(/\bdatetime\b/g, "") + .replace(/\bguid\b/g, "") + // A simple quotation mark is escaped with another one (i.e. ''). + // Since we will evaluate this string we replace simple quotation marks indictaing strings with template quotation marks + .replace(/''/g, "@") + .replace(/'/g, "`") + .replace(/@/g, `'`) + // Mapping 'TableName' to 'name' which is used internally as attribute name + .replace(/\bTableName\b/g, "name") + // Mapping operators + .replace(/\beq\b/g, "===") + .replace(/\bgt\b/g, ">") + .replace(/\bge\b/g, ">=") + .replace(/\blt\b/g, "<") + .replace(/\ble\b/g, "<=") + .replace(/\bne\b/g, "!==") + .replace(/\band\b/g, "&&") + .replace(/\bor\b/g, "||") + .replace(/\(/g, " ( ") + .replace(/\)/g, " ) ") + .replace(/\bnot\b/g, " ! "); - // If a token is neither a number, nor a boolean, nor a string enclosed with quotation marks it is an operand. - // Operands are attributes of the object used within the where clause of LokiJS, thus we need to prepend each - // attribute with an object identifier 'item.attribs'. 
- let transformedQuery = ''; - for (const token of filter.split(' ')) { - if (token === '') { - continue; - } - if (!token.match(/\d+/) && - token !== 'true' && token !== 'false' && - !token.includes('`') && - !['===', '>', '>=', '<', '<=', '!==', '&&', '||', '!', '(', ')'].includes(token)) { - if (token === 'PartitionKey' || token === 'RowKey') { - transformedQuery += `item.${token[0].toLowerCase()}${token.slice(1)} `; - } else { - transformedQuery += `item.attribs.${token} `; - } - } else { - transformedQuery += `${token} `; - } + // If a token is neither a number, nor a boolean, nor a string enclosed with quotation marks it is an operand. + // Operands are attributes of the object used within the where clause of LokiJS, thus we need to prepend each + // attribute with an object identifier 'item.attribs'. + let transformedQuery = ""; + for (const token of filter.split(" ")) { + if (token === "") { + continue; + } + if ( + !token.match(/\d+/) && + token !== "true" && + token !== "false" && + !token.includes("`") && + ![ + "===", + ">", + ">=", + "<", + "<=", + "!==", + "&&", + "||", + "!", + "(", + ")", + ].includes(token) + ) { + if (token === "PartitionKey" || token === "RowKey") { + transformedQuery += `item.${token[0].toLowerCase()}${token.slice( + 1 + )} `; + } else { + transformedQuery += `item.attribs.${token} `; } - return transformedQuery; + } else { + transformedQuery += `${token} `; + } } + return transformedQuery; + } } -module.exports = AzuriteTableRequest; \ No newline at end of file +module.exports = AzuriteTableRequest; diff --git a/lib/model/table/AzuriteTableResponse.js b/lib/model/table/AzuriteTableResponse.js index 147cddcb9..38e172fd5 100644 --- a/lib/model/table/AzuriteTableResponse.js +++ b/lib/model/table/AzuriteTableResponse.js @@ -1,23 +1,25 @@ -'use strict'; +/** @format */ -const uuidV1 = require('uuid/v1'), - N = require('./../../core/HttpHeaderNames'); +"use strict"; + +const uuidV1 = require("uuid/v1"), + N = 
require("./../../core/HttpHeaderNames"); class AzuriteTableResponse { - constructor({ proxy = undefined, payload = undefined }) { - this.proxy = proxy; - this.httpProps = {}; - this.httpProps[N.VERSION] = '2016-05-31'; - this.httpProps[N.DATE] = new Date().toGMTString(); - this.httpProps[N.REQUEST_ID] = uuidV1(); - this.payload = payload; - } + constructor({ proxy = undefined, payload = undefined }) { + this.proxy = proxy; + this.httpProps = {}; + this.httpProps[N.VERSION] = "2016-05-31"; + this.httpProps[N.DATE] = new Date().toGMTString(); + this.httpProps[N.REQUEST_ID] = uuidV1(); + this.payload = payload; + } - addHttpProperty(key, value) { - if (value !== undefined) { - this.httpProps[key] = value; - } + addHttpProperty(key, value) { + if (value !== undefined) { + this.httpProps[key] = value; } + } } module.exports = AzuriteTableResponse; diff --git a/lib/model/table/BaseProxy.js b/lib/model/table/BaseProxy.js index 773f68624..096d24766 100644 --- a/lib/model/table/BaseProxy.js +++ b/lib/model/table/BaseProxy.js @@ -1,46 +1,50 @@ -'use strict'; +/** @format */ -const ODataMode = require('./../../core/Constants').ODataMode, - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; + +const ODataMode = require("./../../core/Constants").ODataMode, + InternalAzuriteError = require("./../../core/InternalAzuriteError"); class BaseProxy { - constructor(entity) { - this._ = entity; - } + constructor(entity) { + this._ = entity; + } - /** - * Returns the odata representation of the any (Table, Entity) entity. 
- * - * @param {any} odata is (nometadata|minimalmetadata|fullmetadata) - * @returns - * @memberof TableProxy - */ - odata(mode) { - switch (mode) { - case ODataMode.NONE: - return { - TableName: this._.name - } - break; - case ODataMode.MINIMAL: - return { - "odata.metadata": this._.odata.metadata, - TableName: this._.name - } - break; - case ODataMode.FULL: - return { - "odata.metadata": this._.odata.metadata, - "odata.type": this._.odata.type, - "odata.id": this._.odata.id, - "odata.editLink": this._.odata.editLink, - TableName: this._.name - } - break; - default: - throw new InternalAzuriteError(`TableProxy: Unsupported OData type "${mode}".`); - } + /** + * Returns the odata representation of the any (Table, Entity) entity. + * + * @param {any} odata is (nometadata|minimalmetadata|fullmetadata) + * @returns + * @memberof TableProxy + */ + odata(mode) { + switch (mode) { + case ODataMode.NONE: + return { + TableName: this._.name, + }; + break; + case ODataMode.MINIMAL: + return { + "odata.metadata": this._.odata.metadata, + TableName: this._.name, + }; + break; + case ODataMode.FULL: + return { + "odata.metadata": this._.odata.metadata, + "odata.type": this._.odata.type, + "odata.id": this._.odata.id, + "odata.editLink": this._.odata.editLink, + TableName: this._.name, + }; + break; + default: + throw new InternalAzuriteError( + `TableProxy: Unsupported OData type "${mode}".` + ); } + } } -module.exports = BaseProxy; \ No newline at end of file +module.exports = BaseProxy; diff --git a/lib/model/table/EntityGenerator.js b/lib/model/table/EntityGenerator.js index 133a2756c..78a082db1 100644 --- a/lib/model/table/EntityGenerator.js +++ b/lib/model/table/EntityGenerator.js @@ -1,58 +1,68 @@ -'use strict'; +/** @format */ -const etag = require('./../../core/utils').computeEtag; +"use strict"; + +const etag = require("./../../core/utils").computeEtag; const _baseUrl = `http://127.0.0.1:10002/devstoreaccount1/`; /** * Generates a Table Storage 'Table' entity and a 
Table Storage 'Entity' entity. - * + * * @class TableGenerator */ class EntityGenerator { - constructor() { - } + constructor() {} - /** - * Generates a persistable table storage 'Table' entity representation. - * - * @param {any} name of the table - * @returns - * @memberof TableGenerator - */ - generateTable(name) { - const entity = {}; - entity.name = name; - entity.odata = {}; - entity.odata.metadata = `${_baseUrl}$metadata#Tables/@Element`; - entity.odata.type = `devstoreaccount1.Tables`; - entity.odata.id = `${_baseUrl}Tables('${name}')`; - entity.odata.editLink = `Tables('${name}')`; - return entity; - } + /** + * Generates a persistable table storage 'Table' entity representation. + * + * @param {any} name of the table + * @returns + * @memberof TableGenerator + */ + generateTable(name) { + const entity = {}; + entity.name = name; + entity.odata = {}; + entity.odata.metadata = `${_baseUrl}$metadata#Tables/@Element`; + entity.odata.type = `devstoreaccount1.Tables`; + entity.odata.id = `${_baseUrl}Tables('${name}')`; + entity.odata.editLink = `Tables('${name}')`; + return entity; + } - generateEntity(rawEntity, tableName) { - // Enriching raw entity from payload with odata attributes - const entity = { attribs: {} }; - entity.partitionKey = rawEntity.PartitionKey; - entity.rowKey = rawEntity.RowKey; - entity.attribs.Timestamp = new Date().toISOString(); - entity.attribs['Timestamp@odata.type'] = "Edm.DateTime"; - for (const key of Object.keys(rawEntity)) { - if (key === 'PartitionKey' || key === 'RowKey' || key === 'Timestamp') { - continue; - } - entity.attribs[key] = rawEntity[key]; - } - - entity.odata = {}; - entity.odata.metadata = `${_baseUrl}${tableName}$metadata#${tableName}/@Element`; - entity.odata.type = `devstoreaccount1.${tableName}`; - entity.odata.id = `${_baseUrl}${tableName}(PartitionKey='${rawEntity.PartitionKey}',RowKey='${rawEntity.RowKey}')`; - entity.odata.editLink = 
`${tableName}(PartitionKey='${rawEntity.PartitionKey}',RowKey='${rawEntity.RowKey}')`; - entity.odata.etag = etag(JSON.stringify(rawEntity)); - return entity; + generateEntity( + rawEntity, + tableName, + partitionKey = undefined, + rowKey = undefined + ) { + // Enriching raw entity from payload with odata attributes + const entity = { attribs: {} }; + entity.partitionKey = partitionKey || rawEntity.PartitionKey; + entity.rowKey = rowKey || rawEntity.RowKey; + entity.attribs.Timestamp = new Date().toISOString(); + entity.attribs["Timestamp@odata.type"] = "Edm.DateTime"; + for (const key of Object.keys(rawEntity)) { + if (key === "PartitionKey" || key === "RowKey" || key === "Timestamp") { + continue; + } + entity.attribs[key] = rawEntity[key]; } + + entity.odata = {}; + entity.odata.metadata = `${_baseUrl}${tableName}$metadata#${tableName}/@Element`; + entity.odata.type = `devstoreaccount1.${tableName}`; + entity.odata.id = `${_baseUrl}${tableName}(PartitionKey='${ + rawEntity.PartitionKey + }',RowKey='${rawEntity.RowKey}')`; + entity.odata.editLink = `${tableName}(PartitionKey='${ + rawEntity.PartitionKey + }',RowKey='${rawEntity.RowKey}')`; + entity.odata.etag = etag(JSON.stringify(rawEntity)); + return entity; + } } -module.exports = new EntityGenerator(); \ No newline at end of file +module.exports = new EntityGenerator(); diff --git a/lib/model/table/EntityProxy.js b/lib/model/table/EntityProxy.js index 2d4de6e3b..f2eb7a93a 100644 --- a/lib/model/table/EntityProxy.js +++ b/lib/model/table/EntityProxy.js @@ -1,54 +1,69 @@ -'use strict'; +/** @format */ -const ODataMode = require('./../../core/Constants').ODataMode, - BaseProxy = require('./BaseProxy'), - InternalAzuriteError = require('./../../core/InternalAzuriteError'); +"use strict"; + +const ODataMode = require("./../../core/Constants").ODataMode, + BaseProxy = require("./BaseProxy"), + InternalAzuriteError = require("./../../core/InternalAzuriteError"); class EntityProxy extends BaseProxy { - 
constructor(entity) { - super(entity); - this.partitionKey = entity.partitionKey; - this.rowKey = entity.rowKey; - this.etag = `\"${entity.odata.etag}\"`; - } + constructor(entity) { + super(entity); + this.partitionKey = entity.partitionKey; + this.rowKey = entity.rowKey; + this.etag = `\"${entity.odata.etag}\"`; + } - /** - * Returns the odata representation of the 'Entity' entity. - * - * @param {any} mode is (nometadata|minimalmetadata|fullmetadata) - * @returns - * @memberof EntityProxy - */ - odata(mode) { - const odata = super.odata(mode); - if (mode === ODataMode.FULL) { - odata['odata.etag'] = this._.odata.etag; - } - return odata; + /** + * Returns the odata representation of the 'Entity' entity. + * + * @param {any} mode is (nometadata|minimalmetadata|fullmetadata) + * @returns + * @memberof EntityProxy + */ + odata(mode) { + const odata = super.odata(mode); + if (mode === ODataMode.FULL) { + odata["odata.etag"] = this._.odata.etag; } + return odata; + } + + /** + * Returns all attributes (including partition key, row key) of the entity, and + * depending on @param mode the odata type specifications. + * + * @param {any} mode is (nometadata|minimalmetadata|fullmetadata) + * @returns + * @memberof EntityProxy + */ + attribs(mode) { + // this is described here: + // https://docs.microsoft.com/en-us/rest/api/storageservices/payload-format-for-table-service-operations + let filteredAttribs = {}; + if (mode === ODataMode.FULL) { + return this._.attribs; // also return the OData type specifications + } else if (mode === ODataMode.MINIMAL) { + // In case of no metadata we filter out most of the OData info and "Timestamp@odata.type" - /** - * Returns all attributes (including partition key, row key) of the entity, and - * depending on @param mode the odata type specifications. 
- * - * @param {any} mode is (nometadata|minimalmetadata|fullmetadata) - * @returns - * @memberof EntityProxy - */ - attribs(mode) { - if (mode === ODataMode.MINIMAL || mode === ODataMode.FULL) { - return this._.attribs; // also return the OData type specifications + for (const key of Object.keys(this._.attribs)) { + if (key.includes("Timestamp@odata.type")) { + continue; } - // In case of no metadata we filter out the OData type specifications - const attribs = {}; - for (const key of Object.keys(this._.attribs)) { - if (key.includes('@odata.type')) { - continue; - } - attribs[key] = this._.attribs[key]; + filteredAttribs[key] = this._.attribs[key]; + } + } else { + // In case of no metadata we filter out all the OData type specifications + const attribs = {}; + for (const key of Object.keys(this._.attribs)) { + if (key.includes("@odata.type")) { + continue; } - return attribs; + filteredAttribs[key] = this._.attribs[key]; + } } + return filteredAttribs; + } } -module.exports = EntityProxy; \ No newline at end of file +module.exports = EntityProxy; diff --git a/lib/model/table/RequestPayloadParser.js b/lib/model/table/RequestPayloadParser.js index d610c36d3..b171734b6 100644 --- a/lib/model/table/RequestPayloadParser.js +++ b/lib/model/table/RequestPayloadParser.js @@ -1,31 +1,35 @@ -'use strict' +/** @format */ -const IAError = require('./../../core/InternalAzuriteError'); +"use strict"; + +const IAError = require("./../../core/InternalAzuriteError"); class RequestPayLoadParser { - constructor() { - } + constructor() {} - parse(contentType, body) { - if (!body.length || body.length === 0) { - return {}; - } - switch (contentType) { - case 'application/atom+xml': - case 'application/atom+xml;': - throw new IAError(`accept value of 'atom+xml' is currently not supported by Azurite`); - break; - case 'application/json': - case 'application/json;': - const txt = body.toString('utf8'); - return JSON.parse(txt); - break; - default: - // This should never happen! 
(should be catched by validation pipeline) - throw new IAError(`content-type value ${contentType} is not supported by Azurite.`) - } + parse(contentType, body) { + if (!body.length || body.length === 0) { + return {}; } + switch (contentType) { + case "application/atom+xml": + case "application/atom+xml;": + throw new IAError( + `accept value of 'atom+xml' is currently not supported by Azurite` + ); + break; + case "application/json": + case "application/json;": + const txt = body.toString("utf8"); + return JSON.parse(txt); + break; + default: + // This should never happen! (should be catched by validation pipeline) + throw new IAError( + `content-type value ${contentType} is not supported by Azurite.` + ); + } + } } -module.exports = new RequestPayLoadParser; - +module.exports = new RequestPayLoadParser(); diff --git a/lib/model/table/TableProxy.js b/lib/model/table/TableProxy.js index a01d37206..6edcbd741 100644 --- a/lib/model/table/TableProxy.js +++ b/lib/model/table/TableProxy.js @@ -1,12 +1,14 @@ -'use strict'; +/** @format */ -const BaseProxy = require('./BaseProxy'); +"use strict"; + +const BaseProxy = require("./BaseProxy"); class TableProxy extends BaseProxy { - constructor(entity) { - super(entity); - this.name = entity.name; - } + constructor(entity) { + super(entity); + this.name = entity.name; + } } -module.exports = TableProxy; \ No newline at end of file +module.exports = TableProxy; diff --git a/lib/routes/blob/AccountRoute.js b/lib/routes/blob/AccountRoute.js index 027af3e82..ec2a42009 100644 --- a/lib/routes/blob/AccountRoute.js +++ b/lib/routes/blob/AccountRoute.js @@ -1,10 +1,12 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - ContainerRequest = require('./../../model/blob/AzuriteContainerRequest'), - Serializers = require('./../../xml/Serializers'), - AzuriteRequest = require('./../../model/blob/AzuriteRequest'), - Operations = require('./../../core/Constants').Operations; +"use strict"; + +const env = 
require("./../../core/env"), + ContainerRequest = require("./../../model/blob/AzuriteContainerRequest"), + Serializers = require("./../../xml/Serializers"), + AzuriteRequest = require("./../../model/blob/AzuriteRequest"), + Operations = require("./../../core/Constants").Operations; /* * Route definitions for all operation on the 'Account' resource type. @@ -12,33 +14,36 @@ const env = require('./../../core/env'), * for details on specification. */ module.exports = (app) => { - app.route(`/${env.emulatedStorageAccountName}`) - .get((req, res, next) => { - if (req.query.comp === 'list') { - req.azuriteOperation = Operations.Account.LIST_CONTAINERS; - req.azuriteRequest = new ContainerRequest({ req: req }); - } - if (req.query.comp === 'properties' && req.query.restype === 'service') { - req.azuriteOperation = Operations.Account.GET_BLOB_SERVICE_PROPERTIES; - req.azuriteRequest = new AzuriteRequest({ req: req }); - } - next(); - }) - .put((req, res, next) => { - if (req.query.comp === 'properties' && req.query.restype === 'service') { - req.azuriteOperation = Operations.Account.SET_BLOB_SERVICE_PROPERTIES; - Serializers.parseServiceProperties(req.body) - .then((result) => { - req.azuriteRequest = new AzuriteRequest({ req: req, payload: result }); - next(); - }); - return; - } - next(); - }) - .options((req, res, next) => { - req.azuriteOperation = Operations.Account.PREFLIGHT_BLOB_REQUEST; - req.azuriteRequest = new AzuriteRequest({ req: req }); - next(); + app + .route(`/${env.emulatedStorageAccountName}`) + .get((req, res, next) => { + if (req.query.comp === "list") { + req.azuriteOperation = Operations.Account.LIST_CONTAINERS; + req.azuriteRequest = new ContainerRequest({ req: req }); + } + if (req.query.comp === "properties" && req.query.restype === "service") { + req.azuriteOperation = Operations.Account.GET_BLOB_SERVICE_PROPERTIES; + req.azuriteRequest = new AzuriteRequest({ req: req }); + } + next(); + }) + .put((req, res, next) => { + if (req.query.comp === 
"properties" && req.query.restype === "service") { + req.azuriteOperation = Operations.Account.SET_BLOB_SERVICE_PROPERTIES; + Serializers.parseServiceProperties(req.body).then((result) => { + req.azuriteRequest = new AzuriteRequest({ + req: req, + payload: result, + }); + next(); }); -} \ No newline at end of file + return; + } + next(); + }) + .options((req, res, next) => { + req.azuriteOperation = Operations.Account.PREFLIGHT_BLOB_REQUEST; + req.azuriteRequest = new AzuriteRequest({ req: req }); + next(); + }); +}; diff --git a/lib/routes/blob/BlobRoute.js b/lib/routes/blob/BlobRoute.js index d221d40f9..4790191e5 100644 --- a/lib/routes/blob/BlobRoute.js +++ b/lib/routes/blob/BlobRoute.js @@ -1,11 +1,13 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - BlobRequest = require('./../../model/blob/AzuriteBlobRequest'), - AzuriteRequest = require('./../../model/blob/AzuriteRequest'), - EntityType = require('./../../core/Constants').StorageEntityType, - Serializers = require('./../../xml/Serializers'), - Operations = require('./../../core/Constants').Operations; +"use strict"; + +const env = require("./../../core/env"), + BlobRequest = require("./../../model/blob/AzuriteBlobRequest"), + AzuriteRequest = require("./../../model/blob/AzuriteRequest"), + EntityType = require("./../../core/Constants").StorageEntityType, + Serializers = require("./../../xml/Serializers"), + Operations = require("./../../core/Constants").Operations; /* * Route definitions for all operation on the 'Blob' resource type. @@ -13,72 +15,77 @@ const env = require('./../../core/env'), * for details on specification. 
*/ module.exports = (app) => { - app.route(`/${env.emulatedStorageAccountName}/:container/*?`) - .get((req, res, next) => { - if (req.query.comp === 'blocklist') { - req.azuriteOperation = Operations.Blob.GET_BLOCK_LIST; - } else if (req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Blob.GET_BLOB_METADATA; - } else if (req.query.comp === 'pagelist') { - req.azuriteOperation = Operations.Blob.GET_PAGE_RANGES; - } - else { - req.azuriteOperation = Operations.Blob.GET_BLOB; - } - req.azuriteRequest = new BlobRequest({ req: req }); - next(); - }) - .head((req, res, next) => { - req.azuriteOperation = Operations.Blob.GET_BLOB_PROPERTIES; - req.azuriteRequest = new BlobRequest({ req: req }); - next(); - }) - .put((req, res, next) => { - let entityType = null; - if (req.query.comp === 'block') { - req.azuriteOperation = Operations.Blob.PUT_BLOCK; - entityType = EntityType.BlockBlob; - } else if (req.query.comp === 'blocklist') { - req.azuriteOperation = Operations.Blob.PUT_BLOCK_LIST; - Serializers.deserializeBlockList(req.body) - .then((blocklist) => { - req.azuriteRequest = new BlobRequest({ req: req, entityType: EntityType.BlockBlob, payload: blocklist }); - next(); - }) - return; - } else if (req.query.comp === 'page') { - req.azuriteOperation = Operations.Blob.PUT_PAGE; - entityType = EntityType.PageBlob; - } else if (req.query.comp === 'appendblock') { - req.azuriteOperation = Operations.Blob.APPEND_BLOCK; - entityType = EntityType.AppendBlob; - } else if (req.query.comp === 'snapshot') { - req.azuriteOperation = Operations.Blob.SNAPSHOT_BLOB; - } else if (req.query.comp === 'lease') { - req.azuriteOperation = Operations.Blob.LEASE_BLOB; - } else if (req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Blob.SET_BLOB_METADATA; - } else if (req.query.comp === 'properties') { - req.azuriteOperation = Operations.Blob.SET_BLOB_PROPERTIES; - } else if (req.query.comp === 'copy') { - req.azuriteOperation = Operations.Blob.ABORT_COPY_BLOB; 
- } - else if (req.headers['x-ms-copy-source'] !== undefined) { - req.azuriteOperation = Operations.Blob.COPY_BLOB; - } else { - req.azuriteOperation = Operations.Blob.PUT_BLOB; - } - req.azuriteRequest = new BlobRequest({ req: req, entityType: entityType }); - next(); - }) - .delete((req, res, next) => { - req.azuriteOperation = Operations.Blob.DELETE_BLOB; - req.azuriteRequest = new BlobRequest({ req: req }); - next(); - }) - .options((req, res, next) => { - req.azuriteOperation = Operations.Account.PREFLIGHT_BLOB_REQUEST; - req.azuriteRequest = new AzuriteRequest({ req: req }); - next(); + app + .route(`/${env.emulatedStorageAccountName}/:container/*?`) + .get((req, res, next) => { + if (req.query.comp === "blocklist") { + req.azuriteOperation = Operations.Blob.GET_BLOCK_LIST; + } else if (req.query.comp === "metadata") { + req.azuriteOperation = Operations.Blob.GET_BLOB_METADATA; + } else if (req.query.comp === "pagelist") { + req.azuriteOperation = Operations.Blob.GET_PAGE_RANGES; + } else { + req.azuriteOperation = Operations.Blob.GET_BLOB; + } + req.azuriteRequest = new BlobRequest({ req: req }); + next(); + }) + .head((req, res, next) => { + req.azuriteOperation = Operations.Blob.GET_BLOB_PROPERTIES; + req.azuriteRequest = new BlobRequest({ req: req }); + next(); + }) + .put((req, res, next) => { + let entityType = null; + if (req.query.comp === "block") { + req.azuriteOperation = Operations.Blob.PUT_BLOCK; + entityType = EntityType.BlockBlob; + } else if (req.query.comp === "blocklist") { + req.azuriteOperation = Operations.Blob.PUT_BLOCK_LIST; + Serializers.deserializeBlockList(req.body).then((blocklist) => { + req.azuriteRequest = new BlobRequest({ + req: req, + entityType: EntityType.BlockBlob, + payload: blocklist, + }); + next(); }); -} \ No newline at end of file + return; + } else if (req.query.comp === "page") { + req.azuriteOperation = Operations.Blob.PUT_PAGE; + entityType = EntityType.PageBlob; + } else if (req.query.comp === "appendblock") { + 
req.azuriteOperation = Operations.Blob.APPEND_BLOCK; + entityType = EntityType.AppendBlob; + } else if (req.query.comp === "snapshot") { + req.azuriteOperation = Operations.Blob.SNAPSHOT_BLOB; + } else if (req.query.comp === "lease") { + req.azuriteOperation = Operations.Blob.LEASE_BLOB; + } else if (req.query.comp === "metadata") { + req.azuriteOperation = Operations.Blob.SET_BLOB_METADATA; + } else if (req.query.comp === "properties") { + req.azuriteOperation = Operations.Blob.SET_BLOB_PROPERTIES; + } else if (req.query.comp === "copy") { + req.azuriteOperation = Operations.Blob.ABORT_COPY_BLOB; + } else if (req.headers["x-ms-copy-source"] !== undefined) { + req.azuriteOperation = Operations.Blob.COPY_BLOB; + } else { + req.azuriteOperation = Operations.Blob.PUT_BLOB; + } + req.azuriteRequest = new BlobRequest({ + req: req, + entityType: entityType, + }); + next(); + }) + .delete((req, res, next) => { + req.azuriteOperation = Operations.Blob.DELETE_BLOB; + req.azuriteRequest = new BlobRequest({ req: req }); + next(); + }) + .options((req, res, next) => { + req.azuriteOperation = Operations.Account.PREFLIGHT_BLOB_REQUEST; + req.azuriteRequest = new AzuriteRequest({ req: req }); + next(); + }); +}; diff --git a/lib/routes/blob/ContainerRoute.js b/lib/routes/blob/ContainerRoute.js index 50fd5b561..3a8c956cf 100644 --- a/lib/routes/blob/ContainerRoute.js +++ b/lib/routes/blob/ContainerRoute.js @@ -1,17 +1,22 @@ -'use strict'; +/** @format */ -const ContainerRequest = require('./../../model/blob/AzuriteContainerRequest'), - StorageManager = require('./../../core/blob/StorageManager'), - Usage = require('./../../core/Constants').Usage, - env = require('./../../core/env'), - Serializers = require('./../../xml/Serializers'), - Operations = require('./../../core/Constants').Operations; +"use strict"; + +const ContainerRequest = require("./../../model/blob/AzuriteContainerRequest"), + StorageManager = require("./../../core/blob/StorageManager"), + Usage = 
require("./../../core/Constants").Usage, + env = require("./../../core/env"), + Serializers = require("./../../xml/Serializers"), + Operations = require("./../../core/Constants").Operations; // Possibly implicit call to blob in $root container const REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE = (req, next) => { - req.url = req.url.replace(req.params.container, `$root/${req.params.container}`); - next('route'); -} + req.url = req.url.replace( + req.params.container, + `$root/${req.params.container}` + ); + next("route"); +}; /* * Route definitions for all operation on the 'Container' resource type. @@ -19,71 +24,87 @@ const REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE = (req, next) => { * for details on specification. */ module.exports = (app) => { - app.route(`/${env.emulatedStorageAccountName}/:container`) - .get((req, res, next) => { - if (req.query.restype === 'container' && req.query.comp === 'list') { - req.azuriteOperation = Operations.Container.LIST_BLOBS; - } else if (req.query.restype === 'container' && req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Container.GET_CONTAINER_METADATA; - } else if (req.query.restype === 'container' && req.query.comp === 'acl') { - req.azuriteOperation = Operations.Container.GET_CONTAINER_ACL; - } else if (req.query.restype === 'container') { - req.azuriteOperation = Operations.Container.GET_CONTAINER_PROPERTIES; - } else { - REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); - return; - } - req.azuriteRequest = new ContainerRequest({ req: req }); - next(); - }) - .head((req, res, next) => { - if (req.query.restype === 'container' && req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Container.GET_CONTAINER_METADATA; - } - else if (req.query.restype === 'container' && req.query.comp === 'acl') { - req.azuriteOperation = Operations.Container.GET_CONTAINER_ACL; - } else if (req.query.restype === 'container') { - req.azuriteOperation = Operations.Container.GET_CONTAINER_PROPERTIES; - } else { - 
REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); - return; - } - req.azuriteRequest = new ContainerRequest({ req: req }); - next(); - }) - .put((req, res, next) => { - if (req.query.restype === 'container' && req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Container.SET_CONTAINER_METADATA; - } - else if (req.query.restype === 'container' && req.query.comp === 'acl') { - req.azuriteOperation = Operations.Container.SET_CONTAINER_ACL; - Serializers.parseSignedIdentifiers(req.body) - .then((signedIdentifiers) => { - req.azuriteRequest = new ContainerRequest({ req: req, payload: signedIdentifiers }); - next(); - }) - return; - } - else if (req.query.restype === 'container' && req.query.comp === 'lease') { - req.azuriteOperation = Operations.Container.LEASE_CONTAINER; - } - else if (req.query.restype === 'container') { - req.azuriteOperation = Operations.Container.CREATE_CONTAINER; - } else { - REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); - return; - } - req.azuriteRequest = new ContainerRequest({ req: req }); - next(); - }) - .delete((req, res, next) => { - if (req.query.restype === 'container') { - req.azuriteOperation = Operations.Container.DELETE_CONTAINER; - } else { - REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); - return; - } - req.azuriteRequest = new ContainerRequest({ req: req }); + app + .route(`/${env.emulatedStorageAccountName}/:container`) + .get((req, res, next) => { + if (req.query.restype === "container" && req.query.comp === "list") { + req.azuriteOperation = Operations.Container.LIST_BLOBS; + } else if ( + req.query.restype === "container" && + req.query.comp === "metadata" + ) { + req.azuriteOperation = Operations.Container.GET_CONTAINER_METADATA; + } else if ( + req.query.restype === "container" && + req.query.comp === "acl" + ) { + req.azuriteOperation = Operations.Container.GET_CONTAINER_ACL; + } else if (req.query.restype === "container") { + req.azuriteOperation = Operations.Container.GET_CONTAINER_PROPERTIES; + } 
else { + REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); + return; + } + req.azuriteRequest = new ContainerRequest({ req: req }); + next(); + }) + .head((req, res, next) => { + if (req.query.restype === "container" && req.query.comp === "metadata") { + req.azuriteOperation = Operations.Container.GET_CONTAINER_METADATA; + } else if ( + req.query.restype === "container" && + req.query.comp === "acl" + ) { + req.azuriteOperation = Operations.Container.GET_CONTAINER_ACL; + } else if (req.query.restype === "container") { + req.azuriteOperation = Operations.Container.GET_CONTAINER_PROPERTIES; + } else { + REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); + return; + } + req.azuriteRequest = new ContainerRequest({ req: req }); + next(); + }) + .put((req, res, next) => { + if (req.query.restype === "container" && req.query.comp === "metadata") { + req.azuriteOperation = Operations.Container.SET_CONTAINER_METADATA; + } else if ( + req.query.restype === "container" && + req.query.comp === "acl" + ) { + req.azuriteOperation = Operations.Container.SET_CONTAINER_ACL; + Serializers.parseSignedIdentifiers(req.body).then( + (signedIdentifiers) => { + req.azuriteRequest = new ContainerRequest({ + req: req, + payload: signedIdentifiers, + }); next(); - }); -} \ No newline at end of file + } + ); + return; + } else if ( + req.query.restype === "container" && + req.query.comp === "lease" + ) { + req.azuriteOperation = Operations.Container.LEASE_CONTAINER; + } else if (req.query.restype === "container") { + req.azuriteOperation = Operations.Container.CREATE_CONTAINER; + } else { + REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); + return; + } + req.azuriteRequest = new ContainerRequest({ req: req }); + next(); + }) + .delete((req, res, next) => { + if (req.query.restype === "container") { + req.azuriteOperation = Operations.Container.DELETE_CONTAINER; + } else { + REWRITE_URL_AND_FORWARD_TO_BLOB_ROUTE(req, next); + return; + } + req.azuriteRequest = new ContainerRequest({ req: req 
}); + next(); + }); +}; diff --git a/lib/routes/blob/NotFoundRoute.js b/lib/routes/blob/NotFoundRoute.js index 242a03dc8..197c96a84 100644 --- a/lib/routes/blob/NotFoundRoute.js +++ b/lib/routes/blob/NotFoundRoute.js @@ -1,3 +1,5 @@ +/** @format */ + "use strict"; /* @@ -6,18 +8,19 @@ * on an existing resource and a supported method on a non-existing resource at this point in time we return 501. The latter case will be * handled accordingly later in the validation middleware. */ -module.exports = app => { - app.route('*').all((req, res, next) => { - if (req.azuriteRequest) { - next(); - } else { - res.status(501); - res.send( - `Path or Http-Method does not match any emulated command.\n` + - `Possible causes include\n` + - ` - missing account name path parameter\n` + - ` - Unsupported Http-Method on resource\n` + - ` - Unsupported / Not implemented "comp" query parameter on resource`); - } - }); -}; \ No newline at end of file +module.exports = (app) => { + app.route("*").all((req, res, next) => { + if (req.azuriteRequest) { + next(); + } else { + res.status(501); + res.send( + `Path or Http-Method does not match any emulated command.\n` + + `Possible causes include\n` + + ` - missing account name path parameter\n` + + ` - Unsupported Http-Method on resource\n` + + ` - Unsupported / Not implemented "comp" query parameter on resource` + ); + } + }); +}; diff --git a/lib/routes/queue/AccountRoute.js b/lib/routes/queue/AccountRoute.js index 5295c5d5b..778e925d5 100644 --- a/lib/routes/queue/AccountRoute.js +++ b/lib/routes/queue/AccountRoute.js @@ -1,8 +1,10 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - AzuriteQueueRequest = require('../../model/queue/AzuriteQueueRequest'), - Operations = require('./../../core/Constants').Operations; +"use strict"; + +const env = require("./../../core/env"), + AzuriteQueueRequest = require("../../model/queue/AzuriteQueueRequest"), + Operations = require("./../../core/Constants").Operations; /* * Route 
definitions for all operation on the 'account' resource type. @@ -10,12 +12,11 @@ const env = require('./../../core/env'), * for details on specification. */ module.exports = (app) => { - app.route(`/${env.emulatedStorageAccountName}`) - .get((req, res, next) => { - if (req.query.comp === 'list') { - req.azuriteOperation = Operations.Queue.LIST_QUEUES; - } - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); - next(); - }); -} \ No newline at end of file + app.route(`/${env.emulatedStorageAccountName}`).get((req, res, next) => { + if (req.query.comp === "list") { + req.azuriteOperation = Operations.Queue.LIST_QUEUES; + } + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + next(); + }); +}; diff --git a/lib/routes/queue/MessageRoute.js b/lib/routes/queue/MessageRoute.js index 0a3a92c48..ec6ba4b13 100644 --- a/lib/routes/queue/MessageRoute.js +++ b/lib/routes/queue/MessageRoute.js @@ -1,9 +1,11 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - QueueMessageTextXmlModel = require('./../../xml/queue/QueueMessageText'), - AzuriteQueueRequest = require('../../model/queue/AzuriteQueueRequest'), - Operations = require('./../../core/Constants').Operations; +"use strict"; + +const env = require("./../../core/env"), + QueueMessageTextXmlModel = require("./../../xml/queue/QueueMessageText"), + AzuriteQueueRequest = require("../../model/queue/AzuriteQueueRequest"), + Operations = require("./../../core/Constants").Operations; /* * Route definitions for all operation on the 'message' resource type. @@ -11,43 +13,51 @@ const env = require('./../../core/env'), * for details on specification. 
*/ module.exports = (app) => { - app.route(`/${env.emulatedStorageAccountName}/:queue/messages/:messageId*?`) - .get((req, res, next) => { - if (req.query.peekonly === 'true') { - req.azuriteOperation = Operations.Queue.PEEK_MESSAGES; - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); - } else { - req.azuriteOperation = Operations.Queue.GET_MESSAGE; - req.azuriteRequest = new AzuriteQueueRequest({ req: req, operation: Operations.Queue.GET_MESSAGE }); - } - next(); - }) - .head((req, res, next) => { - next(); - }) - .put((req, res, next) => { - req.azuriteOperation = Operations.Queue.UPDATE_MESSAGE; - QueueMessageTextXmlModel.toJs(req.body) - .then((payload) => { - req.azuriteRequest = new AzuriteQueueRequest({ req: req, payload: payload }); - next(); - }); - }) - .post((req, res, next) => { - req.azuriteOperation = Operations.Queue.PUT_MESSAGE; - QueueMessageTextXmlModel.toJs(req.body) - .then((payload) => { - req.azuriteRequest = new AzuriteQueueRequest({ req: req, payload: payload }); - next(); - }); - }) - .delete((req, res, next) => { - if (req.params.messageId) { - req.azuriteOperation = Operations.Queue.DELETE_MESSAGE; - } else { - req.azuriteOperation = Operations.Queue.CLEAR_MESSAGES; - } - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); - next(); + app + .route(`/${env.emulatedStorageAccountName}/:queue/messages/:messageId*?`) + .get((req, res, next) => { + if (req.query.peekonly === "true") { + req.azuriteOperation = Operations.Queue.PEEK_MESSAGES; + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + } else { + req.azuriteOperation = Operations.Queue.GET_MESSAGE; + req.azuriteRequest = new AzuriteQueueRequest({ + req: req, + operation: Operations.Queue.GET_MESSAGE, + }); + } + next(); + }) + .head((req, res, next) => { + next(); + }) + .put((req, res, next) => { + req.azuriteOperation = Operations.Queue.UPDATE_MESSAGE; + QueueMessageTextXmlModel.toJs(req.body).then((payload) => { + req.azuriteRequest = new 
AzuriteQueueRequest({ + req: req, + payload: payload, + }); + next(); + }); + }) + .post((req, res, next) => { + req.azuriteOperation = Operations.Queue.PUT_MESSAGE; + QueueMessageTextXmlModel.toJs(req.body).then((payload) => { + req.azuriteRequest = new AzuriteQueueRequest({ + req: req, + payload: payload, }); -} \ No newline at end of file + next(); + }); + }) + .delete((req, res, next) => { + if (req.params.messageId) { + req.azuriteOperation = Operations.Queue.DELETE_MESSAGE; + } else { + req.azuriteOperation = Operations.Queue.CLEAR_MESSAGES; + } + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + next(); + }); +}; diff --git a/lib/routes/queue/QueueRoute.js b/lib/routes/queue/QueueRoute.js index 038a3d554..3486fccbb 100644 --- a/lib/routes/queue/QueueRoute.js +++ b/lib/routes/queue/QueueRoute.js @@ -1,9 +1,11 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - AzuriteQueueRequest = require('../../model/queue/AzuriteQueueRequest'), - Serializers = require('./../../xml/Serializers'), - Operations = require('./../../core/Constants').Operations; +"use strict"; + +const env = require("./../../core/env"), + AzuriteQueueRequest = require("../../model/queue/AzuriteQueueRequest"), + Serializers = require("./../../xml/Serializers"), + Operations = require("./../../core/Constants").Operations; /* * Route definitions for all operation on the 'queue' resource type. @@ -11,45 +13,50 @@ const env = require('./../../core/env'), * for details on specification. 
*/ module.exports = (app) => { - app.route(`/${env.emulatedStorageAccountName}/:queue/`) - .get((req, res, next) => { - if (req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Queue.GET_QUEUE_METADATA; - } else if (req.query.comp === 'acl') { - req.azuriteOperation = Operations.Queue.GET_QUEUE_ACL; - } - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); - next(); - }) - .head((req, res, next) => { - if (req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Queue.GET_QUEUE_METADATA; - } else if (req.query.comp === 'acl') { - req.azuriteOperation = Operations.Queue.GET_QUEUE_ACL; - } - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); - next(); - }) - .put((req, res, next) => { - if (req.query.comp === 'metadata') { - req.azuriteOperation = Operations.Queue.SET_QUEUE_METADATA; - } else if (req.query.comp === 'acl') { - req.azuriteOperation = Operations.Queue.SET_QUEUE_ACL; - Serializers.parseSignedIdentifiers(req.body) - .then((signedIdentifiers) => { - req.azuriteRequest = new AzuriteQueueRequest({ req: req, payload: signedIdentifiers }); - next(); - }); - return; - } else { - req.azuriteOperation = Operations.Queue.CREATE_QUEUE; - } - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); - next(); - }) - .delete((req, res, next) => { - req.azuriteOperation = Operations.Queue.DELETE_QUEUE; - req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + app + .route(`/${env.emulatedStorageAccountName}/:queue/`) + .get((req, res, next) => { + if (req.query.comp === "metadata") { + req.azuriteOperation = Operations.Queue.GET_QUEUE_METADATA; + } else if (req.query.comp === "acl") { + req.azuriteOperation = Operations.Queue.GET_QUEUE_ACL; + } + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + next(); + }) + .head((req, res, next) => { + if (req.query.comp === "metadata") { + req.azuriteOperation = Operations.Queue.GET_QUEUE_METADATA; + } else if (req.query.comp === "acl") { + req.azuriteOperation = 
Operations.Queue.GET_QUEUE_ACL; + } + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + next(); + }) + .put((req, res, next) => { + if (req.query.comp === "metadata") { + req.azuriteOperation = Operations.Queue.SET_QUEUE_METADATA; + } else if (req.query.comp === "acl") { + req.azuriteOperation = Operations.Queue.SET_QUEUE_ACL; + Serializers.parseSignedIdentifiers(req.body).then( + (signedIdentifiers) => { + req.azuriteRequest = new AzuriteQueueRequest({ + req: req, + payload: signedIdentifiers, + }); next(); - }); -} \ No newline at end of file + } + ); + return; + } else { + req.azuriteOperation = Operations.Queue.CREATE_QUEUE; + } + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + next(); + }) + .delete((req, res, next) => { + req.azuriteOperation = Operations.Queue.DELETE_QUEUE; + req.azuriteRequest = new AzuriteQueueRequest({ req: req }); + next(); + }); +}; diff --git a/lib/routes/table/EntityRoute.js b/lib/routes/table/EntityRoute.js index c0f5d90bc..86ac92fec 100644 --- a/lib/routes/table/EntityRoute.js +++ b/lib/routes/table/EntityRoute.js @@ -1,9 +1,11 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - AzuriteTableRequest = require('./../../model/table/AzuriteTableRequest'), - N = require('./../../core/HttpHeaderNames'), - Operations = require('./../../core/Constants').Operations.Table; +"use strict"; + +const env = require("./../../core/env"), + AzuriteTableRequest = require("./../../model/table/AzuriteTableRequest"), + N = require("./../../core/HttpHeaderNames"), + Operations = require("./../../core/Constants").Operations.Table; /* * Route definitions for all operation on the 'message' resource type. @@ -11,43 +13,55 @@ const env = require('./../../core/env'), * for details on specification. 
*/ module.exports = (app) => { - app.route(new RegExp(`\/${env.emulatedStorageAccountName}\/([A-Za-z0-9]+)(.*)`)) - .get((req, res, next) => { - if (req.azuriteOperation === undefined) { - req.azuriteOperation = Operations.QUERY_ENTITY; - req.azuriteRequest = new AzuriteTableRequest({ req: req }); - } - next(); - }) - .head((req, res, next) => { - next(); - }) - .put((req, res, next) => { - if (req.azuriteOperation === undefined) { - req.azuriteRequest = new AzuriteTableRequest({ req: req, payload: req.payload }); - req.azuriteOperation = req.azuriteRequest.httpProps[N.IF_MATCH] - ? Operations.UPDATE_ENTITY - : Operations.INSERT_OR_REPLACE_ENTITY; - } - next(); - }) - .post((req, res, next) => { - if (req.azuriteOperation === undefined) { - req.azuriteOperation = Operations.INSERT_ENTITY - req.azuriteRequest = new AzuriteTableRequest({ req: req, payload: req.payload }); - } - next(); - }) - .delete((req, res, next) => { - req.azuriteOperation = Operations.DELETE_ENTITY; - req.azuriteRequest = new AzuriteTableRequest({ req: req }); - next(); - }) - .merge((req, res, next) => { - req.azuriteRequest = new AzuriteTableRequest({ req: req, payload: req.payload }); - req.azuriteOperation = req.azuriteRequest.httpProps[N.IF_MATCH] - ? Operations.MERGE_ENTITY - : Operations.INSERT_OR_MERGE_ENTITY; - next(); + app + .route( + new RegExp(`\/${env.emulatedStorageAccountName}\/([A-Za-z0-9]+)(.*)`) + ) + .get((req, res, next) => { + if (req.azuriteOperation === undefined) { + req.azuriteOperation = Operations.QUERY_ENTITY; + req.azuriteRequest = new AzuriteTableRequest({ req: req }); + } + next(); + }) + .head((req, res, next) => { + next(); + }) + .put((req, res, next) => { + if (req.azuriteOperation === undefined) { + req.azuriteRequest = new AzuriteTableRequest({ + req: req, + payload: req.payload, + }); + req.azuriteOperation = req.azuriteRequest.httpProps[N.IF_MATCH] + ? 
Operations.UPDATE_ENTITY + : Operations.INSERT_OR_REPLACE_ENTITY; + } + next(); + }) + .post((req, res, next) => { + if (req.azuriteOperation === undefined) { + req.azuriteOperation = Operations.INSERT_ENTITY; + req.azuriteRequest = new AzuriteTableRequest({ + req: req, + payload: req.payload, }); -} \ No newline at end of file + } + next(); + }) + .delete((req, res, next) => { + req.azuriteOperation = Operations.DELETE_ENTITY; + req.azuriteRequest = new AzuriteTableRequest({ req: req }); + next(); + }) + .merge((req, res, next) => { + req.azuriteRequest = new AzuriteTableRequest({ + req: req, + payload: req.payload, + }); + req.azuriteOperation = req.azuriteRequest.httpProps[N.IF_MATCH] + ? Operations.MERGE_ENTITY + : Operations.INSERT_OR_MERGE_ENTITY; + next(); + }); +}; diff --git a/lib/routes/table/TableRoute.js b/lib/routes/table/TableRoute.js index 48f88774b..26a680f90 100644 --- a/lib/routes/table/TableRoute.js +++ b/lib/routes/table/TableRoute.js @@ -1,8 +1,10 @@ -'use strict'; +/** @format */ -const env = require('./../../core/env'), - AzuriteTableRequest = require('./../../model/table/AzuriteTableRequest'), - Operations = require('./../../core/Constants').Operations.Table; +"use strict"; + +const env = require("./../../core/env"), + AzuriteTableRequest = require("./../../model/table/AzuriteTableRequest"), + Operations = require("./../../core/Constants").Operations.Table; /* * Route definitions for all operation on the 'message' resource type. @@ -10,26 +12,30 @@ const env = require('./../../core/env'), * for details on specification. 
*/ module.exports = (app) => { - app.route(new RegExp(`\/${env.emulatedStorageAccountName}\/Tables(.*)`)) - .get((req, res, next) => { - req.azuriteOperation = Operations.QUERY_TABLE; - req.azuriteRequest = new AzuriteTableRequest({ req: req }); - next(); - }) - .head((req, res, next) => { - next(); - }) - .put((req, res, next) => { - next(); - }) - .post((req, res, next) => { - req.azuriteOperation = Operations.CREATE_TABLE; - req.azuriteRequest = new AzuriteTableRequest({ req: req, payload: req.payload }); - next(); - }) - .delete((req, res, next) => { - req.azuriteOperation = Operations.DELETE_TABLE; - req.azuriteRequest = new AzuriteTableRequest({ req: req }); - next(); - }); -} \ No newline at end of file + app + .route(new RegExp(`\/${env.emulatedStorageAccountName}\/Tables(.*)`)) + .get((req, res, next) => { + req.azuriteOperation = Operations.QUERY_TABLE; + req.azuriteRequest = new AzuriteTableRequest({ req: req }); + next(); + }) + .head((req, res, next) => { + next(); + }) + .put((req, res, next) => { + next(); + }) + .post((req, res, next) => { + req.azuriteOperation = Operations.CREATE_TABLE; + req.azuriteRequest = new AzuriteTableRequest({ + req: req, + payload: req.payload, + }); + next(); + }) + .delete((req, res, next) => { + req.azuriteOperation = Operations.DELETE_TABLE; + req.azuriteRequest = new AzuriteTableRequest({ req: req }); + next(); + }); +}; diff --git a/lib/validation/NumOfSignedIdentifiers.js b/lib/validation/NumOfSignedIdentifiers.js index a86864bcb..4f373a978 100644 --- a/lib/validation/NumOfSignedIdentifiers.js +++ b/lib/validation/NumOfSignedIdentifiers.js @@ -1,22 +1,23 @@ -'use strict'; +/** @format */ -const AError = require('./../core/AzuriteError'), - ErrorCodes = require('./../core/ErrorCodes'); +"use strict"; + +const AError = require("./../core/AzuriteError"), + ErrorCodes = require("./../core/ErrorCodes"); /** * Checks whether the number of signed identifiers is at most 5. 
* See https://docs.microsoft.com/rest/api/storageservices/fileservices/establishing-a-stored-access-policy for spec. */ class NumOfSignedIdentifiers { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const si = request.payload; - if ((si !== null || si !== undefined) && si.length > 5) { - throw new AError(ErrorCodes.InvalidInput); - } + validate({ request = undefined }) { + const si = request.payload; + if ((si !== null || si !== undefined) && si.length > 5) { + throw new AError(ErrorCodes.InvalidInput); } + } } -module.exports = new NumOfSignedIdentifiers(); \ No newline at end of file +module.exports = new NumOfSignedIdentifiers(); diff --git a/lib/validation/blob/AbortCopy.js b/lib/validation/blob/AbortCopy.js index b9911d26d..c5be697a6 100644 --- a/lib/validation/blob/AbortCopy.js +++ b/lib/validation/blob/AbortCopy.js @@ -1,23 +1,24 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - CopyOperationsManager = require('./../../core/blob/CopyOperationsManager'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + CopyOperationsManager = require("./../../core/blob/CopyOperationsManager"), + ErrorCodes = require("./../../core/ErrorCodes"); /** - * Checks whether there is no pending copy operation. - * + * Checks whether there is no pending copy operation. 
+ * * @class AbortCopy */ class AbortCopy { - constructor() { - } + constructor() {} - validate() { - if (!CopyOperationsManager.isPending()) { - throw new AError(ErrorCodes.NoPendingCopyOperation); - } + validate() { + if (!CopyOperationsManager.isPending()) { + throw new AError(ErrorCodes.NoPendingCopyOperation); } + } } -module.exports = new AbortCopy(); \ No newline at end of file +module.exports = new AbortCopy(); diff --git a/lib/validation/blob/AppendBlobConditionalRequestHeaders.js b/lib/validation/blob/AppendBlobConditionalRequestHeaders.js index ef22a3130..8859ddb29 100644 --- a/lib/validation/blob/AppendBlobConditionalRequestHeaders.js +++ b/lib/validation/blob/AppendBlobConditionalRequestHeaders.js @@ -1,33 +1,38 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); /** * Checks whether the following conditional request headers specific to an AppendBlob are satisfied. * See https://docs.microsoft.com/rest/api/storageservices/append-block for details. 
- * + * * - x-ms-blob-condition-maxsize * - x-ms-blob-condition-appendpos - * + * * @class AppendBlobConditionalRequestHeaders */ class AppendBlobConditionalRequestHeaders { - constructor() { - } + constructor() {} - validate({ request = undefined, blobProxy = undefined }) { - const maxSize = request.httpProps[N.BLOB_CONDITION_MAX_SIZE], - appendPos = request.httpProps[N.BLOB_CONDITION_APPENDPOS]; + validate({ request = undefined, blobProxy = undefined }) { + const maxSize = request.httpProps[N.BLOB_CONDITION_MAX_SIZE], + appendPos = request.httpProps[N.BLOB_CONDITION_APPENDPOS]; - if (maxSize !== undefined && (blobProxy.original.size > maxSize || (blobProxy.original.size + request.body.length) > maxSize)) { - throw new AError(ErrorCodes.MaxBlobSizeConditionNotMet); - } - if (appendPos !== undefined && blobProxy.original.size !== appendPos) { - throw new AError(ErrorCodes.AppendPositionConditionNotMet); - } + if ( + maxSize !== undefined && + (blobProxy.original.size > maxSize || + blobProxy.original.size + request.body.length > maxSize) + ) { + throw new AError(ErrorCodes.MaxBlobSizeConditionNotMet); + } + if (appendPos !== undefined && blobProxy.original.size !== appendPos) { + throw new AError(ErrorCodes.AppendPositionConditionNotMet); } + } } module.exports = new AppendBlobConditionalRequestHeaders(); diff --git a/lib/validation/blob/AppendMaxBlobCommittedBlocks.js b/lib/validation/blob/AppendMaxBlobCommittedBlocks.js index 1353ea46a..17469fe88 100644 --- a/lib/validation/blob/AppendMaxBlobCommittedBlocks.js +++ b/lib/validation/blob/AppendMaxBlobCommittedBlocks.js @@ -1,23 +1,24 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); /** - * Checks whether the total 
number of committed blocks present in this append blob does not exceed 50,000. - * + * Checks whether the total number of committed blocks present in this append blob does not exceed 50,000. + * * @class AppendMaxBlobCommittedBlocks */ class AppendMaxBlobCommittedBlocks { - constructor() { - } + constructor() {} - validate({ blobProxy = undefined }) { - if (blobProxy.original[N.BLOB_COMMITTED_BLOCK_COUNT] + 1 > 50000) { - throw new AError(ErrorCodes.BlockCountExceedsLimit); - } + validate({ blobProxy = undefined }) { + if (blobProxy.original[N.BLOB_COMMITTED_BLOCK_COUNT] + 1 > 50000) { + throw new AError(ErrorCodes.BlockCountExceedsLimit); } + } } -module.exports = new AppendMaxBlobCommittedBlocks(); \ No newline at end of file +module.exports = new AppendMaxBlobCommittedBlocks(); diff --git a/lib/validation/blob/AssociatedSnapshotsDeletion.js b/lib/validation/blob/AssociatedSnapshotsDeletion.js index c2401d829..216e8b5ab 100644 --- a/lib/validation/blob/AssociatedSnapshotsDeletion.js +++ b/lib/validation/blob/AssociatedSnapshotsDeletion.js @@ -1,8 +1,10 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the blob to be deleted has any associated snapshots and - if this is true - has according @@ -10,36 +12,44 @@ const AError = require('./../../core/AzuriteError'), * Also checks whether above header is specified on a blob (valid) or a snapshot (not valid). 
*/ class AssociatedSnapshotsDeletion { - constructor() { + constructor() {} + + validate({ request = undefined, moduleOptions = undefined }) { + // If a snapshot is requested to be deleted this validation rule is not relevant + if (request.isSnapshot()) { + return; } - validate({ request = undefined, moduleOptions = undefined }) { - // If a snapshot is requested to be deleted this validation rule is not relevant - if (request.isSnapshot()) { - return; - } - - const collection = moduleOptions.collection; - // This header (x-ms-delete-snapshots) should be specified only for a request against the base blob resource. - // If this header is specified on a request to delete an individual snapshot, the Blob service returns status code 400 (Bad Request). - if (request.httpProps[N.DELETE_SNAPSHOTS] !== undefined && request.isSnapshot()) { - throw new AError(ErrorCodes.UnsupportedHeader); - } + const collection = moduleOptions.collection; + // This header (x-ms-delete-snapshots) should be specified only for a request against the base blob resource. + // If this header is specified on a request to delete an individual snapshot, the Blob service returns status code 400 (Bad Request). + if ( + request.httpProps[N.DELETE_SNAPSHOTS] !== undefined && + request.isSnapshot() + ) { + throw new AError(ErrorCodes.UnsupportedHeader); + } - // If this header (x-ms-delete-snapshots) is not specified on the request and the blob has associated snapshots, the Blob service returns status code 409 (Conflict). - const snapshots = collection.chain().find({ 'originId': { '$eq': request.id } }).data(); - // If the blob has associated snapshots... 
- if (snapshots.length > 0) { - // return 409 (Conflict) if header (x-ms-delete-snapshots) is not specified on the request - if (request.httpProps[N.DELETE_SNAPSHOTS] === undefined) { - throw new AError(ErrorCodes.SnapshotsPresent); - } - // return 400 (Error) if header (x-ms-delete-snapshots) has invalid values - if (request.httpProps[N.DELETE_SNAPSHOTS] !== 'include' && request.httpProps[N.DELETE_SNAPSHOTS] !== 'only') { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - } + // If this header (x-ms-delete-snapshots) is not specified on the request and the blob has associated snapshots, the Blob service returns status code 409 (Conflict). + const snapshots = collection + .chain() + .find({ originId: { $eq: request.id } }) + .data(); + // If the blob has associated snapshots... + if (snapshots.length > 0) { + // return 409 (Conflict) if header (x-ms-delete-snapshots) is not specified on the request + if (request.httpProps[N.DELETE_SNAPSHOTS] === undefined) { + throw new AError(ErrorCodes.SnapshotsPresent); + } + // return 400 (Error) if header (x-ms-delete-snapshots) has invalid values + if ( + request.httpProps[N.DELETE_SNAPSHOTS] !== "include" && + request.httpProps[N.DELETE_SNAPSHOTS] !== "only" + ) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } } + } } -module.exports = new AssociatedSnapshotsDeletion(); \ No newline at end of file +module.exports = new AssociatedSnapshotsDeletion(); diff --git a/lib/validation/blob/BlobCommitted.js b/lib/validation/blob/BlobCommitted.js index 658ed3915..c5ed55e14 100644 --- a/lib/validation/blob/BlobCommitted.js +++ b/lib/validation/blob/BlobCommitted.js @@ -1,7 +1,9 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the blob has been committed yet as part of PUT BlockList. 
@@ -9,18 +11,17 @@ const AError = require('./../../core/AzuriteError'), * such as GET Blob. */ class BlobCommitted { - constructor() { - } + constructor() {} - validate({ blobProxy = undefined }) { - if (blobProxy === undefined) { - throw new AError(ErrorCodes.BlobNotFound); - } + validate({ blobProxy = undefined }) { + if (blobProxy === undefined) { + throw new AError(ErrorCodes.BlobNotFound); + } - if (!blobProxy.original.committed) { - throw new AError(ErrorCodes.BlobNotFound); - } + if (!blobProxy.original.committed) { + throw new AError(ErrorCodes.BlobNotFound); } + } } -module.exports = new BlobCommitted(); \ No newline at end of file +module.exports = new BlobCommitted(); diff --git a/lib/validation/blob/BlobCreationSize.js b/lib/validation/blob/BlobCreationSize.js index 9d3c81ba2..aa0f15dcf 100644 --- a/lib/validation/blob/BlobCreationSize.js +++ b/lib/validation/blob/BlobCreationSize.js @@ -1,25 +1,30 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - EntityType = require('./../../core/Constants').StorageEntityType, - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + EntityType = require("./../../core/Constants").StorageEntityType, + ErrorCodes = require("./../../core/ErrorCodes"); class BlobCreationSize { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - // Append and Page Blobs must not be larger than 0 bytes - if ((request.entityType === EntityType.AppendBlob || - request.entityType === EntityType.PageBlob) && - request.body.length > 0) { - throw new AError(ErrorCodes.InvalidBlobType); - } - if (request.entityType === EntityType.BlockBlob && - request.body.length > 268435456) { - throw new AError(ErrorCodes.RequestBodyTooLarge); - } + validate({ request = undefined }) { + // Append and Page Blobs must not be larger than 0 bytes + if ( + (request.entityType === EntityType.AppendBlob || + request.entityType === 
EntityType.PageBlob) && + request.body.length > 0 + ) { + throw new AError(ErrorCodes.InvalidBlobType); + } + if ( + request.entityType === EntityType.BlockBlob && + request.body.length > 268435456 + ) { + throw new AError(ErrorCodes.RequestBodyTooLarge); } + } } -module.exports = new BlobCreationSize(); \ No newline at end of file +module.exports = new BlobCreationSize(); diff --git a/lib/validation/blob/BlobExists.js b/lib/validation/blob/BlobExists.js index 4b716e771..f6ffc1027 100644 --- a/lib/validation/blob/BlobExists.js +++ b/lib/validation/blob/BlobExists.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the blob exists. * Source of truth is the in-memory DB, not the filesystem. */ class BlobExists { - constructor() { - } + constructor() {} - validate({ blobProxy = undefined }) { - if (blobProxy === undefined) { - throw new AError(ErrorCodes.BlobNotFound); - } + validate({ blobProxy = undefined }) { + if (blobProxy === undefined) { + throw new AError(ErrorCodes.BlobNotFound); } + } } -module.exports = new BlobExists; \ No newline at end of file +module.exports = new BlobExists(); diff --git a/lib/validation/blob/BlobLeaseUsage.js b/lib/validation/blob/BlobLeaseUsage.js index bd126a0a5..fff2873b8 100644 --- a/lib/validation/blob/BlobLeaseUsage.js +++ b/lib/validation/blob/BlobLeaseUsage.js @@ -1,68 +1,81 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - LeaseStatus = require('./../../core/Constants').LeaseStatus, - Usage = require('./../../core/Constants').Usage, - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + LeaseStatus = require("./../../core/Constants").LeaseStatus, + Usage = 
require("./../../core/Constants").Usage, + ErrorCodes = require("./../../core/ErrorCodes"); class BlobLeaseUsage { - constructor() { + constructor() {} + + /** + * Checks whether intended lease usage operation is semantically valid as specified + * at https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob + */ + validate({ + request = undefined, + blobProxy = undefined, + moduleOptions = undefined, + }) { + if (blobProxy === undefined) { + return; } - /** - * Checks whether intended lease usage operation is semantically valid as specified - * at https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob - */ - validate({ request = undefined, blobProxy = undefined, moduleOptions = undefined }) { - if (blobProxy === undefined) { - return; - } - - const leaseId = request.leaseId(), - usage = moduleOptions.usage; + const leaseId = request.leaseId(), + usage = moduleOptions.usage; - blobProxy.updateLeaseState(); + blobProxy.updateLeaseState(); - switch (blobProxy.original.leaseState) { - case LeaseStatus.AVAILABLE: - if (leaseId) { - throw new AError(ErrorCodes.LeaseNotPresentWithBlobOperation); - } - break; - case LeaseStatus.LEASED: - if (usage === Usage.Write && leaseId === undefined) { - throw new AError(ErrorCodes.LeaseIdMissing); - } - if (usage === Usage.Write && leaseId !== blobProxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - if (usage === Usage.Read && leaseId !== blobProxy.original.leaseId && leaseId !== undefined) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - break; - case LeaseStatus.BREAKING: - if (usage === Usage.Write && leaseId === undefined) { - throw new AError(ErrorCodes.LeaseIdMissing); - } - if (usage === Usage.Write && leaseId !== blobProxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithBlobOperation); - } - if (usage === Usage.Read && leaseId !== undefined && leaseId !== blobProxy.original.leaseId) { - throw new 
AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - break; - case LeaseStatus.BROKEN: - if (leaseId) { - throw new AError(ErrorCodes.LeaseNotPresentWithBlobOperation); - } - break; - case LeaseStatus.EXPIRED: - if (leaseId) { - throw new AError(ErrorCodes.LeaseNotPresentWithBlobOperation); - } - break; + switch (blobProxy.original.leaseState) { + case LeaseStatus.AVAILABLE: + if (leaseId) { + throw new AError(ErrorCodes.LeaseNotPresentWithBlobOperation); + } + break; + case LeaseStatus.LEASED: + if (usage === Usage.Write && leaseId === undefined) { + throw new AError(ErrorCodes.LeaseIdMissing); + } + if (usage === Usage.Write && leaseId !== blobProxy.original.leaseId) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + if ( + usage === Usage.Read && + leaseId !== blobProxy.original.leaseId && + leaseId !== undefined + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + break; + case LeaseStatus.BREAKING: + if (usage === Usage.Write && leaseId === undefined) { + throw new AError(ErrorCodes.LeaseIdMissing); + } + if (usage === Usage.Write && leaseId !== blobProxy.original.leaseId) { + throw new AError(ErrorCodes.LeaseIdMismatchWithBlobOperation); + } + if ( + usage === Usage.Read && + leaseId !== undefined && + leaseId !== blobProxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + break; + case LeaseStatus.BROKEN: + if (leaseId) { + throw new AError(ErrorCodes.LeaseNotPresentWithBlobOperation); + } + break; + case LeaseStatus.EXPIRED: + if (leaseId) { + throw new AError(ErrorCodes.LeaseNotPresentWithBlobOperation); } + break; } + } } -module.exports = new BlobLeaseUsage(); \ No newline at end of file +module.exports = new BlobLeaseUsage(); diff --git a/lib/validation/blob/BlobName.js b/lib/validation/blob/BlobName.js index dab7f8b0b..e75ac6988 100644 --- a/lib/validation/blob/BlobName.js +++ b/lib/validation/blob/BlobName.js @@ -1,25 +1,26 @@ -'use strict'; +/** 
@format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the blob name adheres to the naming convention when being created within the $root container * as specified at https://docs.microsoft.com/en-us/rest/api/storageservices/working-with-the-root-container */ class BlobName { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const containerName = request.containerName, - blobName = request.blobName; - if (containerName === '$root') { - if (blobName && blobName.includes('/')) { - throw new AError(ErrorCodes.InvalidResourceName); - } - } + validate({ request = undefined }) { + const containerName = request.containerName, + blobName = request.blobName; + if (containerName === "$root") { + if (blobName && blobName.includes("/")) { + throw new AError(ErrorCodes.InvalidResourceName); + } } + } } -module.exports = new BlobName; \ No newline at end of file +module.exports = new BlobName(); diff --git a/lib/validation/blob/BlockList.js b/lib/validation/blob/BlockList.js index ef51fc758..7b3b84fb5 100644 --- a/lib/validation/blob/BlockList.js +++ b/lib/validation/blob/BlockList.js @@ -1,34 +1,42 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - env = require('./../../core/env'), - BlobExistsVal = require('./BlobExists'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + env = require("./../../core/env"), + BlobExistsVal = require("./BlobExists"), + ErrorCodes = require("./../../core/ErrorCodes"); class BlockList { - constructor() { - } + constructor() {} - /** - * Checks whether the blocklist is correct. It is correct if all block ids are existant in the database. 
- */ - validate({ request = undefined, moduleOptions = undefined }) { - const sm = moduleOptions.storageManager, - blockList = request.payload; - for (const block of blockList) { - const blobId = env.blockId(request.containerName, request.blobName, block.id); - const { blobProxy } = sm._getCollectionAndBlob(request.containerName, blobId); - try { - BlobExistsVal.validate({ blobProxy: blobProxy }); - } catch (e) { - if (e.statusCode === 404) { - throw new AError(ErrorCodes.InvalidBlockList); - } else { - throw e; // Something unexpected happened - } - } + /** + * Checks whether the blocklist is correct. It is correct if all block ids are existant in the database. + */ + validate({ request = undefined, moduleOptions = undefined }) { + const sm = moduleOptions.storageManager, + blockList = request.payload; + for (const block of blockList) { + const blobId = env.blockId( + request.containerName, + request.blobName, + block.id + ); + const { blobProxy } = sm._getCollectionAndBlob( + request.containerName, + blobId + ); + try { + BlobExistsVal.validate({ blobProxy: blobProxy }); + } catch (e) { + if (e.statusCode === 404) { + throw new AError(ErrorCodes.InvalidBlockList); + } else { + throw e; // Something unexpected happened } + } } + } } -module.exports = new BlockList(); \ No newline at end of file +module.exports = new BlockList(); diff --git a/lib/validation/blob/BlockPageSize.js b/lib/validation/blob/BlockPageSize.js index 2eb55fe86..e5b1fdd3f 100644 --- a/lib/validation/blob/BlockPageSize.js +++ b/lib/validation/blob/BlockPageSize.js @@ -1,48 +1,49 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - EntityType = require('./../../core/Constants').StorageEntityType, - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + EntityType = 
require("./../../core/Constants").StorageEntityType, + ErrorCodes = require("./../../core/ErrorCodes"); /** * Validates whether PUT Block, PUT AppendBlob, and PUT Page operations adhere * to allowed maximum size. - * + * * @class BlockPageSize */ class BlockPageSize { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const size = request.body.length || request.httpProps[N.CONTENT_LENGTH]; - switch (request.entityType) { - case EntityType.BlockBlob: - // Blocks larger than 100MB are not allowed since API version 2016-05-31 - // see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/put-block - if (size > 104857600) { - throw new AErro(ErrorCodes.RequestBodyTooLarge); - } - break; - case EntityType.AppendBlob: - // ApppendBlocks larger than 4MB are not allowed as per specification at - // see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/append-block - if (size > 4194304) { - throw new AErro(ErrorCodes.RequestBodyTooLarge); - } - break; - case EntityType.PageBlob: - // Pages larger than 4MB are not allowed as per specification at - // https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/put-page - if (size > 4194304) { - throw new AErro(ErrorCodes.RequestBodyTooLarge); - } - break; - default: - throw new AError(ErrorCodes.InvalidBlobType); + validate({ request = undefined }) { + const size = request.body.length || request.httpProps[N.CONTENT_LENGTH]; + switch (request.entityType) { + case EntityType.BlockBlob: + // Blocks larger than 100MB are not allowed since API version 2016-05-31 + // see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/put-block + if (size > 104857600) { + throw new AError(ErrorCodes.RequestBodyTooLarge); + } + break; + case EntityType.AppendBlob: + // ApppendBlocks larger than 4MB are not allowed as per specification at + // see https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/append-block + if (size > 
4194304) { + throw new AError(ErrorCodes.RequestBodyTooLarge); + } + break; + case EntityType.PageBlob: + // Pages larger than 4MB are not allowed as per specification at + // https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/put-page + if (size > 4194304) { + throw new AError(ErrorCodes.RequestBodyTooLarge); } + break; + default: + throw new AError(ErrorCodes.InvalidBlobType); } + } } -module.exports = new BlockPageSize; \ No newline at end of file +module.exports = new BlockPageSize(); diff --git a/lib/validation/blob/CompatibleBlobType.js b/lib/validation/blob/CompatibleBlobType.js index f90a8ac82..1ba6c4a72 100644 --- a/lib/validation/blob/CompatibleBlobType.js +++ b/lib/validation/blob/CompatibleBlobType.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class CompatibleBlobType { - constructor() { - } + constructor() {} - validate({ request = undefined, blobProxy = undefined }) { - // skipped if blob is created, not updated - if (blobProxy === undefined) { - return; - } - if (request.entityType !== blobProxy.original.entityType) { - throw new AError(ErrorCodes.InvalidBlobType); - } + validate({ request = undefined, blobProxy = undefined }) { + // skipped if blob is created, not updated + if (blobProxy === undefined) { + return; + } + if (request.entityType !== blobProxy.original.entityType) { + throw new AError(ErrorCodes.InvalidBlobType); } + } } -module.exports = new CompatibleBlobType(); \ No newline at end of file +module.exports = new CompatibleBlobType(); diff --git a/lib/validation/blob/ConditionalRequestHeaders.js b/lib/validation/blob/ConditionalRequestHeaders.js index 325c35a42..3d0d038ff 100644 --- a/lib/validation/blob/ConditionalRequestHeaders.js +++ b/lib/validation/blob/ConditionalRequestHeaders.js @@ 
-1,73 +1,96 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - EntityType = require('./../../core/Constants').StorageEntityType, - N = require('./../../core/HttpHeaderNames'), - Usage = require('./../../core/Constants').Usage; +"use strict"; -class ConditionalRequestHeaders { - constructor() { - } +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + EntityType = require("./../../core/Constants").StorageEntityType, + N = require("./../../core/HttpHeaderNames"), + Usage = require("./../../core/Constants").Usage; - /** - * Checks whether the following conditional request headers are satisfied. - * - If-Modified-Since - * - If-Unmodified-Since - * - If-Match - * - If-None-Match - */ - validate({ request = undefined, containerProxy = undefined, blobProxy = undefined, moduleOptions = undefined }) { - const proxy = request.entityType === EntityType.Container ? containerProxy : blobProxy, - ifMatchVal = request.httpProps[N.IF_MATCH], - ifNoneMatchVal = request.httpProps[N.IF_NONE_MATCH], - ifModifiedSinceVal = (request.httpProps[N.IF_MODFIFIED_SINCE]) ? new Date(request.httpProps[N.IF_MODFIFIED_SINCE]) : undefined, - ifUnmodifiedSinceVal = (request.httpProps[N.IF_UNMODIFIED_SINCE]) ? new Date(request.httpProps[N.IF_UNMODIFIED_SINCE]) : undefined, - usage = moduleOptions.usage; +class ConditionalRequestHeaders { + constructor() {} - // If the storage has not been created yet, but conditional headers are specified the operation fails with 412 - if (proxy === undefined) { - if (ifMatchVal) { - throw new AError(ErrorCodes.ConditionNotMetWrite); // 412 + /** + * Checks whether the following conditional request headers are satisfied. 
+ * - If-Modified-Since + * - If-Unmodified-Since + * - If-Match + * - If-None-Match + */ + validate({ + request = undefined, + containerProxy = undefined, + blobProxy = undefined, + moduleOptions = undefined, + }) { + const proxy = + request.entityType === EntityType.Container + ? containerProxy + : blobProxy, + ifMatchVal = request.httpProps[N.IF_MATCH], + ifNoneMatchVal = request.httpProps[N.IF_NONE_MATCH], + ifModifiedSinceVal = request.httpProps[N.IF_MODFIFIED_SINCE] + ? new Date(request.httpProps[N.IF_MODFIFIED_SINCE]) + : undefined, + ifUnmodifiedSinceVal = request.httpProps[N.IF_UNMODIFIED_SINCE] + ? new Date(request.httpProps[N.IF_UNMODIFIED_SINCE]) + : undefined, + usage = moduleOptions.usage; - } - return; - } - // If wildcard character is specified, perform the operation only if the resource does not exist, and fail the operation if it does exist. - // Resource does not exist if there is no proxy available or if there is a proxy available but the blob has not been committed yet. - if (ifNoneMatchVal === '*' && (blobProxy === undefined || blobProxy.original.committed === true)) { - throw new AError(ErrorCodes.BlobAlreadyExists); - } + // If the storage has not been created yet, but conditional headers are specified the operation fails with 412 + if (proxy === undefined) { + if (ifMatchVal) { + throw new AError(ErrorCodes.ConditionNotMetWrite); // 412 + } + return; + } + // If wildcard character is specified, perform the operation only if the resource does not exist, and fail the operation if it does exist. + // Resource does not exist if there is no proxy available or if there is a proxy available but the blob has not been committed yet. 
+ if ( + ifNoneMatchVal === "*" && + (blobProxy === undefined || blobProxy.original.committed === true) + ) { + throw new AError(ErrorCodes.BlobAlreadyExists); + } - const ETagVal = `\"${proxy.original.etag}\"`, - lastModifiedVal = new Date(proxy.lastModified()), - ifModifiedSince = ifModifiedSinceVal < lastModifiedVal, // operation will be performed only if it has been modified since the specified time - ifUnmodifiedSince = ifUnmodifiedSinceVal >= lastModifiedVal, // operation will be performed only if it has _not_ been modified since the specified time - ifMatch = ifMatchVal !== undefined && (ifMatchVal === ETagVal || ifMatchVal === '*'), - ifNoneMatch = ifNoneMatchVal !== undefined && ifNoneMatchVal !== ETagVal; + const ETagVal = `\"${proxy.original.etag}\"`, + lastModifiedVal = new Date(proxy.lastModified()), + ifModifiedSince = ifModifiedSinceVal < lastModifiedVal, // operation will be performed only if it has been modified since the specified time + ifUnmodifiedSince = ifUnmodifiedSinceVal >= lastModifiedVal, // operation will be performed only if it has _not_ been modified since the specified time + ifMatch = + ifMatchVal !== undefined && + (ifMatchVal === ETagVal || ifMatchVal === "*"), + ifNoneMatch = ifNoneMatchVal !== undefined && ifNoneMatchVal !== ETagVal; - switch (usage) { - case Usage.Read: - if ((ifMatchVal !== undefined && !ifMatch) || - (ifUnmodifiedSinceVal !== undefined && !ifUnmodifiedSince)) { - throw new AError(ErrorCodes.ConditionNotMetWrite); // 412 - } + switch (usage) { + case Usage.Read: + if ( + (ifMatchVal !== undefined && !ifMatch) || + (ifUnmodifiedSinceVal !== undefined && !ifUnmodifiedSince) + ) { + throw new AError(ErrorCodes.ConditionNotMetWrite); // 412 + } - if ((ifNoneMatchVal !== undefined && !ifNoneMatch) || - (ifModifiedSinceVal && !ifModifiedSince)) { - throw new AError(ErrorCodes.ConditionNotMetRead); // 304 - } - break; - case Usage.Write: - if (ifMatchVal !== undefined && !ifMatch || - ifUnmodifiedSinceVal !== 
undefined && !ifUnmodifiedSince || - ifNoneMatchVal !== undefined && !ifNoneMatch || - ifModifiedSinceVal !== undefined && !ifModifiedSince) { - throw new AError(ErrorCodes.ConditionNotMetWrite); // 412 - } - break; + if ( + (ifNoneMatchVal !== undefined && !ifNoneMatch) || + (ifModifiedSinceVal && !ifModifiedSince) + ) { + throw new AError(ErrorCodes.ConditionNotMetRead); // 304 + } + break; + case Usage.Write: + if ( + (ifMatchVal !== undefined && !ifMatch) || + (ifUnmodifiedSinceVal !== undefined && !ifUnmodifiedSince) || + (ifNoneMatchVal !== undefined && !ifNoneMatch) || + (ifModifiedSinceVal !== undefined && !ifModifiedSince) + ) { + throw new AError(ErrorCodes.ConditionNotMetWrite); // 412 } + break; } + } } -module.exports = new ConditionalRequestHeaders(); \ No newline at end of file +module.exports = new ConditionalRequestHeaders(); diff --git a/lib/validation/blob/ConflictingContainer.js b/lib/validation/blob/ConflictingContainer.js index baa30cc6e..a471c68a5 100644 --- a/lib/validation/blob/ConflictingContainer.js +++ b/lib/validation/blob/ConflictingContainer.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the container that is to be created already exists. * Source of truth is the in-memory DB, not the filesystem. 
*/ class ConflictingContainer { - constructor() { - } + constructor() {} - validate({ containerProxy = undefined }) { - if (containerProxy !== undefined) { - throw new AError(ErrorCodes.ContainerAlreadyExists); - } + validate({ containerProxy = undefined }) { + if (containerProxy !== undefined) { + throw new AError(ErrorCodes.ContainerAlreadyExists); } + } } -module.exports = new ConflictingContainer(); \ No newline at end of file +module.exports = new ConflictingContainer(); diff --git a/lib/validation/blob/ContainerExists.js b/lib/validation/blob/ContainerExists.js index 6d4ba2a89..742d0fab3 100644 --- a/lib/validation/blob/ContainerExists.js +++ b/lib/validation/blob/ContainerExists.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the container exists. * Source of truth is the in-memory DB, not the filesystem. 
*/ class ContainerExists { - constructor() { - } + constructor() {} - validate({ containerProxy = undefined }) { - if (containerProxy === undefined) { - throw new AError(ErrorCodes.ContainerNotFound); - } + validate({ containerProxy = undefined }) { + if (containerProxy === undefined) { + throw new AError(ErrorCodes.ContainerNotFound); } + } } -module.exports = new ContainerExists; \ No newline at end of file +module.exports = new ContainerExists(); diff --git a/lib/validation/blob/ContainerLeaseUsage.js b/lib/validation/blob/ContainerLeaseUsage.js index 714d34f48..edf628993 100644 --- a/lib/validation/blob/ContainerLeaseUsage.js +++ b/lib/validation/blob/ContainerLeaseUsage.js @@ -1,66 +1,85 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - Usage = require('./../../core/Constants').Usage, - LeaseStatus = require('./../../core/Constants').LeaseStatus; +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + Usage = require("./../../core/Constants").Usage, + LeaseStatus = require("./../../core/Constants").LeaseStatus; /** * Checks whether intended lease usage operation is semantically valid as specified * at https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * + * * @class ContainerLeaseUsage */ class ContainerLeaseUsage { - constructor() { - } + constructor() {} - validate({ request = undefined, containerProxy = undefined, moduleOptions = undefined }) { - const leaseId = request.leaseId(), - usage = moduleOptions.usage; + validate({ + request = undefined, + containerProxy = undefined, + moduleOptions = undefined, + }) { + const leaseId = request.leaseId(), + usage = moduleOptions.usage; - containerProxy.updateLeaseState(); + containerProxy.updateLeaseState(); - switch (containerProxy.original.leaseState) { - case LeaseStatus.AVAILABLE: - if (leaseId) { - throw new 
AError(ErrorCodes.LeaseNotPresentWithContainerOperation); - } - break; - case LeaseStatus.LEASED: - if (usage === Usage.Delete && !leaseId) { - throw new AError(ErrorCodes.LeaseIdMissing); - } - if (usage === Usage.Delete && leaseId !== containerProxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithContainerOperation); - } - if (usage === Usage.Other && leaseId !== containerProxy.original.leaseId && leaseId !== undefined) { - throw new AError(ErrorCodes.LeaseIdMismatchWithContainerOperation); - } - break; - case LeaseStatus.BREAKING: - if (usage === Usage.Delete && leaseId !== containerProxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithContainerOperation); - } - if (usage === Usage.Delete && !leaseId) { - throw new AError(ErrorCodes.LeaseIdMissing); - } - if (usage === Usage.Other && leaseId !== containerProxy.original.leaseId && leaseId !== undefined) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - break; - case LeaseStatus.BROKEN: - if (leaseId) { - throw new AError(ErrorCodes.LeaseNotPresentWithContainerOperation); - } - break; - case LeaseStatus.EXPIRED: - if (leaseId) { - throw new AError(ErrorCodes.LeaseNotPresentWithContainerOperation); - } - break; + switch (containerProxy.original.leaseState) { + case LeaseStatus.AVAILABLE: + if (leaseId) { + throw new AError(ErrorCodes.LeaseNotPresentWithContainerOperation); + } + break; + case LeaseStatus.LEASED: + if (usage === Usage.Delete && !leaseId) { + throw new AError(ErrorCodes.LeaseIdMissing); + } + if ( + usage === Usage.Delete && + leaseId !== containerProxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithContainerOperation); + } + if ( + usage === Usage.Other && + leaseId !== containerProxy.original.leaseId && + leaseId !== undefined + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithContainerOperation); + } + break; + case LeaseStatus.BREAKING: + if ( + usage === Usage.Delete && + leaseId !== 
containerProxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithContainerOperation); + } + if (usage === Usage.Delete && !leaseId) { + throw new AError(ErrorCodes.LeaseIdMissing); + } + if ( + usage === Usage.Other && + leaseId !== containerProxy.original.leaseId && + leaseId !== undefined + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + break; + case LeaseStatus.BROKEN: + if (leaseId) { + throw new AError(ErrorCodes.LeaseNotPresentWithContainerOperation); + } + break; + case LeaseStatus.EXPIRED: + if (leaseId) { + throw new AError(ErrorCodes.LeaseNotPresentWithContainerOperation); } + break; } + } } -module.exports = new ContainerLeaseUsage(); \ No newline at end of file +module.exports = new ContainerLeaseUsage(); diff --git a/lib/validation/blob/ContainerName.js b/lib/validation/blob/ContainerName.js index a48c88801..83d160510 100644 --- a/lib/validation/blob/ContainerName.js +++ b/lib/validation/blob/ContainerName.js @@ -1,28 +1,29 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the container name adheres to the naming convention * as specified at https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata */ class ContainerName { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const name = request.containerName; - if (name === '$root') { - return; - } - if (name.length < 3 || name.length > 63) { - throw new AError(ErrorCodes.OutOfRangeInput); - } - if (/^([a-z0-9]+)(-[a-z0-9]+)*$/i.test(name) === false) { - throw new AError(ErrorCodes.InvalidInput); - } + validate({ request = undefined }) { + const name = request.containerName; + if (name === "$root") { + return; + } + if (name.length < 3 || 
name.length > 63) { + throw new AError(ErrorCodes.OutOfRangeInput); + } + if (/^([a-z0-9]+)(-[a-z0-9]+)*$/i.test(name) === false) { + throw new AError(ErrorCodes.InvalidInput); } + } } -module.exports = new ContainerName; \ No newline at end of file +module.exports = new ContainerName(); diff --git a/lib/validation/blob/ContentLengthExists.js b/lib/validation/blob/ContentLengthExists.js index 643e65988..12267b4fb 100644 --- a/lib/validation/blob/ContentLengthExists.js +++ b/lib/validation/blob/ContentLengthExists.js @@ -1,18 +1,19 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); class ContentLengthExists { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - if (!request.httpProps[N.CONTENT_LENGTH]) { - throw new AError(ErrorCodes.MissingContentLengthHeader); - } + validate({ request = undefined }) { + if (!request.httpProps[N.CONTENT_LENGTH]) { + throw new AError(ErrorCodes.MissingContentLengthHeader); } + } } -module.exports = new ContentLengthExists(); \ No newline at end of file +module.exports = new ContentLengthExists(); diff --git a/lib/validation/blob/CopyStatus.js b/lib/validation/blob/CopyStatus.js index 3bb184534..705b1a524 100644 --- a/lib/validation/blob/CopyStatus.js +++ b/lib/validation/blob/CopyStatus.js @@ -1,23 +1,27 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - CopyStat = require('./../../core/Constants').CopyStatus; +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + CopyStat = require("./../../core/Constants").CopyStatus; /** * Checks whether the a 
pending copy operation already exists at the destination. - * + * * @class CopyStatus */ class CopyStatus { - constructor() { - } + constructor() {} - validate({ blobProxy = undefined }) { - if (blobProxy !== undefined && blobProxy.original.copyStatus === CopyStat.PENDING) { - throw new AError(ErrorCodes.PendingCopyOperation); - } + validate({ blobProxy = undefined }) { + if ( + blobProxy !== undefined && + blobProxy.original.copyStatus === CopyStat.PENDING + ) { + throw new AError(ErrorCodes.PendingCopyOperation); } + } } -module.exports = new CopyStatus(); \ No newline at end of file +module.exports = new CopyStatus(); diff --git a/lib/validation/blob/IsOfBlobType.js b/lib/validation/blob/IsOfBlobType.js index ea3493b6e..e7e179545 100644 --- a/lib/validation/blob/IsOfBlobType.js +++ b/lib/validation/blob/IsOfBlobType.js @@ -1,21 +1,21 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the blob has specific type. 
*/ class IsOfBlobType { - constructor() { - } + constructor() {} - validate({ blobProxy = undefined, moduleOptions = undefined }) { - if (blobProxy.original.entityType !== moduleOptions.entityType) { - throw new AError(ErrorCodes.InvalidBlobType); - } + validate({ blobProxy = undefined, moduleOptions = undefined }) { + if (blobProxy.original.entityType !== moduleOptions.entityType) { + throw new AError(ErrorCodes.InvalidBlobType); } + } } -module.exports = new IsOfBlobType; \ No newline at end of file +module.exports = new IsOfBlobType(); diff --git a/lib/validation/blob/LeaseActions.js b/lib/validation/blob/LeaseActions.js index a48b19d15..44ad6aa0b 100644 --- a/lib/validation/blob/LeaseActions.js +++ b/lib/validation/blob/LeaseActions.js @@ -1,105 +1,139 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - LeaseAction = require('./../../core/Constants').LeaseActions, - LeaseStatus = require('./../../core/Constants').LeaseStatus, - BlobRequest = require('./../../model/blob/AzuriteBlobRequest'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + LeaseAction = require("./../../core/Constants").LeaseActions, + LeaseStatus = require("./../../core/Constants").LeaseStatus, + BlobRequest = require("./../../model/blob/AzuriteBlobRequest"), + ErrorCodes = require("./../../core/ErrorCodes"); /** * Checks whether intended lease operation is semantically valid as specified * at https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container - * + * * @class LeaseActions */ class LeaseActions { - constructor() { - } + constructor() {} - validate({ request = undefined, containerProxy = undefined, blobProxy = undefined }) { - const leaseAction = request.httpProps[N.LEASE_ACTION], - leaseId = request.httpProps[N.LEASE_ID] || request.httpProps[N.PROPOSED_LEASE_ID], - 
proxy = (request instanceof BlobRequest) ? blobProxy : containerProxy; + validate({ + request = undefined, + containerProxy = undefined, + blobProxy = undefined, + }) { + const leaseAction = request.httpProps[N.LEASE_ACTION], + leaseId = + request.httpProps[N.LEASE_ID] || request.httpProps[N.PROPOSED_LEASE_ID], + proxy = request instanceof BlobRequest ? blobProxy : containerProxy; - if (![LeaseAction.ACQUIRE, LeaseAction.RENEW, LeaseAction.CHANGE, LeaseAction.RELEASE, LeaseAction.BREAK].includes(leaseAction)) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } + if ( + ![ + LeaseAction.ACQUIRE, + LeaseAction.RENEW, + LeaseAction.CHANGE, + LeaseAction.RELEASE, + LeaseAction.BREAK, + ].includes(leaseAction) + ) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } - proxy.updateLeaseState(); + proxy.updateLeaseState(); - switch (proxy.original.leaseState) { - case LeaseStatus.AVAILABLE: - if (leaseAction === LeaseAction.RELEASE) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - if (leaseAction !== LeaseAction.ACQUIRE) { - throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); - } - break; - case LeaseStatus.LEASED: - if (leaseAction === LeaseAction.ACQUIRE && leaseId !== proxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseAlreadyPresent); - } - if (leaseAction === LeaseAction.CHANGE) { - if (request.httpProps[N.PROPOSED_LEASE_ID] === undefined) { - throw new AError(ErrorCodes.MissingRequiredHeader); - } - if (request.httpProps[N.PROPOSED_LEASE_ID] !== proxy.original.leaseId && request.httpProps[N.LEASE_ID] !== proxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - } - if ([LeaseAction.RENEW, LeaseAction.RELEASE].includes(leaseAction) && - leaseId !== proxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - break; - case LeaseStatus.EXPIRED: - if (leaseAction === LeaseAction.CHANGE) { - throw new 
AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); - // This is the only validation check specific to Blobs - } else if (leaseAction === LeaseAction.RENEW && request instanceof BlobRequest && leaseId === proxy.original.leaseId && proxy.original.leaseETag !== proxy.original.etag) { - throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation) - } - else if ((leaseAction === LeaseAction.RENEW || leaseAction === LeaseAction.RELEASE) && - leaseId !== proxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - break; - case LeaseStatus.BREAKING: - if (leaseId === proxy.original.leaseId) { - if (leaseAction === LeaseAction.ACQUIRE) { - throw new AError(ErrorCodes.LeaseIsBreakingAndCannotBeAcquired); - } - if (leaseAction === LeaseAction.CHANGE) { - throw new AError(ErrorCodes.LeaseIsBreakingAndCannotBeChanged); - } - } else { - if (leaseAction === LeaseAction.RELEASE) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - if (leaseAction === LeaseAction.CHANGE) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - if (leaseAction === LeaseAction.ACQUIRE || - leaseAction === LeaseAction.RENEW) { - throw new AError(ErrorCodes.LeaseAlreadyPresent); - } - } - break; - case LeaseStatus.BROKEN: - if (leaseAction === LeaseAction.RENEW) { - throw new AError(ErrorCodes.LeaseIsBrokenAndCannotBeRenewed); - } - if (leaseAction === LeaseAction.CHANGE) { - throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); - } - if (leaseAction === LeaseAction.RELEASE && leaseId !== proxy.original.leaseId) { - throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); - } - break; + switch (proxy.original.leaseState) { + case LeaseStatus.AVAILABLE: + if (leaseAction === LeaseAction.RELEASE) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + if (leaseAction !== LeaseAction.ACQUIRE) { + throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); + } + break; + case 
LeaseStatus.LEASED: + if ( + leaseAction === LeaseAction.ACQUIRE && + leaseId !== proxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseAlreadyPresent); + } + if (leaseAction === LeaseAction.CHANGE) { + if (request.httpProps[N.PROPOSED_LEASE_ID] === undefined) { + throw new AError(ErrorCodes.MissingRequiredHeader); + } + if ( + request.httpProps[N.PROPOSED_LEASE_ID] !== proxy.original.leaseId && + request.httpProps[N.LEASE_ID] !== proxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + } + if ( + [LeaseAction.RENEW, LeaseAction.RELEASE].includes(leaseAction) && + leaseId !== proxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + break; + case LeaseStatus.EXPIRED: + if (leaseAction === LeaseAction.CHANGE) { + throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); + // This is the only validation check specific to Blobs + } else if ( + leaseAction === LeaseAction.RENEW && + request instanceof BlobRequest && + leaseId === proxy.original.leaseId && + proxy.original.leaseETag !== proxy.original.etag + ) { + throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); + } else if ( + (leaseAction === LeaseAction.RENEW || + leaseAction === LeaseAction.RELEASE) && + leaseId !== proxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + break; + case LeaseStatus.BREAKING: + if (leaseId === proxy.original.leaseId) { + if (leaseAction === LeaseAction.ACQUIRE) { + throw new AError(ErrorCodes.LeaseIsBreakingAndCannotBeAcquired); + } + if (leaseAction === LeaseAction.CHANGE) { + throw new AError(ErrorCodes.LeaseIsBreakingAndCannotBeChanged); + } + } else { + if (leaseAction === LeaseAction.RELEASE) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + if (leaseAction === LeaseAction.CHANGE) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); + } + if ( + leaseAction === LeaseAction.ACQUIRE 
|| + leaseAction === LeaseAction.RENEW + ) { + throw new AError(ErrorCodes.LeaseAlreadyPresent); + } + } + break; + case LeaseStatus.BROKEN: + if (leaseAction === LeaseAction.RENEW) { + throw new AError(ErrorCodes.LeaseIsBrokenAndCannotBeRenewed); + } + if (leaseAction === LeaseAction.CHANGE) { + throw new AError(ErrorCodes.LeaseNotPresentWithLeaseOperation); + } + if ( + leaseAction === LeaseAction.RELEASE && + leaseId !== proxy.original.leaseId + ) { + throw new AError(ErrorCodes.LeaseIdMismatchWithLeaseOperation); } + break; } + } } -module.exports = new LeaseActions(); \ No newline at end of file +module.exports = new LeaseActions(); diff --git a/lib/validation/blob/LeaseDuration.js b/lib/validation/blob/LeaseDuration.js index bd3f85b01..289471401 100644 --- a/lib/validation/blob/LeaseDuration.js +++ b/lib/validation/blob/LeaseDuration.js @@ -1,42 +1,48 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - LeaseAction = require('./../../core/Constants').LeaseActions, - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + LeaseAction = require("./../../core/Constants").LeaseActions, + ErrorCodes = require("./../../core/ErrorCodes"); /** * Checks whether lease duration and lease break period conforms to specification * at https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container: * leaseDuration: -1;[15-60] - * leaseBreakPeriod: [0-60] - * + * leaseBreakPeriod: [0-60] + * * @class LeaseDuration */ class LeaseDuration { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const leaseAction = request.httpProps[N.LEASE_ACTION], - leaseBreakPeriod = (request.httpProps[N.LEASE_BREAK_PERIOD]) ? parseInt(request.httpProps[N.LEASE_BREAK_PERIOD]) : undefined, - leaseDuration = (request.httpProps[N.LEASE_DURATION]) ? 
parseInt(request.httpProps[N.LEASE_DURATION]) : undefined; + validate({ request = undefined }) { + const leaseAction = request.httpProps[N.LEASE_ACTION], + leaseBreakPeriod = request.httpProps[N.LEASE_BREAK_PERIOD] + ? parseInt(request.httpProps[N.LEASE_BREAK_PERIOD]) + : undefined, + leaseDuration = request.httpProps[N.LEASE_DURATION] + ? parseInt(request.httpProps[N.LEASE_DURATION]) + : undefined; - // x-ms-lease-duration is only required and processed for lease action 'acquire' - if (leaseAction === LeaseAction.ACQUIRE) { - if (!(leaseDuration === -1 || leaseDuration >= 15 && leaseDuration <= 60)) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - } - - // x-ms-lease-break-period is optional - if (leaseBreakPeriod) { - if (!(leaseBreakPeriod >= 0 && leaseBreakPeriod <= 60)) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - } + // x-ms-lease-duration is only required and processed for lease action 'acquire' + if (leaseAction === LeaseAction.ACQUIRE) { + if ( + !(leaseDuration === -1 || (leaseDuration >= 15 && leaseDuration <= 60)) + ) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + } + // x-ms-lease-break-period is optional + if (leaseBreakPeriod) { + if (!(leaseBreakPeriod >= 0 && leaseBreakPeriod <= 60)) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } } + } } -module.exports = new LeaseDuration(); \ No newline at end of file +module.exports = new LeaseDuration(); diff --git a/lib/validation/blob/LeaseId.js b/lib/validation/blob/LeaseId.js index 28a66bcd3..bfcbc46cf 100644 --- a/lib/validation/blob/LeaseId.js +++ b/lib/validation/blob/LeaseId.js @@ -1,30 +1,31 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'), - isUUID = require('validator/lib/isUUID'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = 
require("./../../core/ErrorCodes"), + isUUID = require("validator/lib/isUUID"); /** * Checks whether leaseId complies to RFC4122 (UUID) version 3-5. - * + * * @class LeaseId */ class LeaseId { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const leaseId = request.httpProps[N.LEASE_ID], - proposedLeaseId = request.httpProps[N.PROPOSED_LEASE_ID]; + validate({ request = undefined }) { + const leaseId = request.httpProps[N.LEASE_ID], + proposedLeaseId = request.httpProps[N.PROPOSED_LEASE_ID]; - if (leaseId && !isUUID(leaseId, 'all')) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - if (proposedLeaseId && !isUUID(proposedLeaseId, 'all')) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } + if (leaseId && !isUUID(leaseId, "all")) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + if (proposedLeaseId && !isUUID(proposedLeaseId, "all")) { + throw new AError(ErrorCodes.InvalidHeaderValue); } + } } -module.exports = new LeaseId(); \ No newline at end of file +module.exports = new LeaseId(); diff --git a/lib/validation/blob/MD5.js b/lib/validation/blob/MD5.js index 5601bb45d..c825afa1b 100644 --- a/lib/validation/blob/MD5.js +++ b/lib/validation/blob/MD5.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const crypto = require('crypto'), - N = require('./../../core/HttpHeaderNames'), - AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const crypto = require("crypto"), + N = require("./../../core/HttpHeaderNames"), + AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class MD5 { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const sourceMd5 = request.httpProps[N.CONTENT_MD5]; - const targetMd5 = request.calculateContentMd5(); - if (sourceMd5 && targetMd5 !== sourceMd5) { - throw new AError(ErrorCodes.Md5Mismatch); - } + validate({ request = undefined }) { + const sourceMd5 
= request.httpProps[N.CONTENT_MD5]; + const targetMd5 = request.calculateContentMd5(); + if (sourceMd5 && targetMd5 !== sourceMd5) { + throw new AError(ErrorCodes.Md5Mismatch); } + } } -module.exports = new MD5(); \ No newline at end of file +module.exports = new MD5(); diff --git a/lib/validation/blob/OriginHeader.js b/lib/validation/blob/OriginHeader.js index bb503d316..1ec6c86c5 100644 --- a/lib/validation/blob/OriginHeader.js +++ b/lib/validation/blob/OriginHeader.js @@ -1,23 +1,24 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); /** - * Validates whether the 'Origin' request header is set. - * - * @class + * Validates whether the 'Origin' request header is set. + * + * @class */ class OriginHeader { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - if(!request.httpProps[N.ORIGIN]) { - throw new AError(ErrorCodes.MissingRequiredHeader); - } + validate({ request = undefined }) { + if (!request.httpProps[N.ORIGIN]) { + throw new AError(ErrorCodes.MissingRequiredHeader); } + } } -module.exports = new OriginHeader; \ No newline at end of file +module.exports = new OriginHeader(); diff --git a/lib/validation/blob/PageAlignment.js b/lib/validation/blob/PageAlignment.js index fc4495bce..779f0b9f4 100644 --- a/lib/validation/blob/PageAlignment.js +++ b/lib/validation/blob/PageAlignment.js @@ -1,38 +1,39 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = 
require("./../../core/ErrorCodes"); /** * Validates the 512-byte alignment of a Page Blob. - * Given that pages must be aligned with 512-byte boundaries, - * the start offset must be a modulus of 512 and the end offset must be a modulus of 512 – 1. - * Examples of valid byte ranges are 0-511, 512-1023, etc. - * + * Given that pages must be aligned with 512-byte boundaries, + * the start offset must be a modulus of 512 and the end offset must be a modulus of 512 – 1. + * Examples of valid byte ranges are 0-511, 512-1023, etc. + * * @class PageAlignment */ class PageAlignment { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const range = request.httpProps[N.RANGE]; - // Range is optional - if (!range) { - return; - } - const re = new RegExp(/bytes=[0-9]+-[0-9]+/); - if (!re.test(range)) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - const parts = range.split('=')[1].split('-'); - const startByte = parseInt(parts[0]), - endByte = parseInt(parts[1]); - if (startByte % 512 !== 0 || ((endByte + 1) - startByte) % 512 !== 0) { - throw new AError(ErrorCodes.InvalidPageRange); - } + validate({ request = undefined }) { + const range = request.httpProps[N.RANGE]; + // Range is optional + if (!range) { + return; + } + const re = new RegExp(/bytes=[0-9]+-[0-9]+/); + if (!re.test(range)) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + const parts = range.split("=")[1].split("-"); + const startByte = parseInt(parts[0]), + endByte = parseInt(parts[1]); + if (startByte % 512 !== 0 || (endByte + 1 - startByte) % 512 !== 0) { + throw new AError(ErrorCodes.InvalidPageRange); } + } } -module.exports = new PageAlignment; \ No newline at end of file +module.exports = new PageAlignment(); diff --git a/lib/validation/blob/PageBlobHeaderSanity.js b/lib/validation/blob/PageBlobHeaderSanity.js index 9ae02e10d..02fe6b4bf 100644 --- a/lib/validation/blob/PageBlobHeaderSanity.js +++ b/lib/validation/blob/PageBlobHeaderSanity.js @@ -1,47 
+1,48 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); class PageBlobHeaderSanity { - constructor() { + constructor() {} + + validate({ request = undefined }) { + const httpProps = request.httpProps; + let pageWrite = httpProps[N.PAGE_WRITE]; + + if (!pageWrite) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + + pageWrite = pageWrite.toLowerCase(); + + if (!(pageWrite === "clear" || pageWrite === "update")) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + + const isClearSet = pageWrite === "clear"; + if (isClearSet && httpProps[N.CONTENT_LENGTH] != 0) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + if (isClearSet && httpProps[N.CONTENT_MD5]) { + throw new AError(ErrorCodes.InvalidHeaderValue); } - validate({ request = undefined }) { - const httpProps = request.httpProps; - let pageWrite = httpProps[N.PAGE_WRITE]; - - if (!pageWrite) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - - pageWrite = pageWrite.toLowerCase(); - - if (!(pageWrite === 'clear' || pageWrite === 'update')) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - - const isClearSet = pageWrite === 'clear'; - if (isClearSet && httpProps[N.CONTENT_LENGTH] != 0) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - if (isClearSet && httpProps[N.CONTENT_MD5]) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - - const range = httpProps[N.RANGE]; - // This is safe since range string has already been validated to be well-formed - // in PageAlignment Validator. 
- const parts = range.split('=')[1].split('-'); - if (!isClearSet) { - const startByte = parseInt(parts[0]), - endByte = parseInt(parts[1]); - if (httpProps[N.CONTENT_LENGTH] != (endByte - startByte) + 1) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - } + const range = httpProps[N.RANGE]; + // This is safe since range string has already been validated to be well-formed + // in PageAlignment Validator. + const parts = range.split("=")[1].split("-"); + if (!isClearSet) { + const startByte = parseInt(parts[0]), + endByte = parseInt(parts[1]); + if (httpProps[N.CONTENT_LENGTH] != endByte - startByte + 1) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } } + } } -module.exports = new PageBlobHeaderSanity; +module.exports = new PageBlobHeaderSanity(); diff --git a/lib/validation/blob/PutBlobHeaders.js b/lib/validation/blob/PutBlobHeaders.js index d580146e8..805374d40 100644 --- a/lib/validation/blob/PutBlobHeaders.js +++ b/lib/validation/blob/PutBlobHeaders.js @@ -1,26 +1,27 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - EntityType = require('./../../core/Constants').StorageEntityType, - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + EntityType = require("./../../core/Constants").StorageEntityType, + ErrorCodes = require("./../../core/ErrorCodes"); class PutBlobHeaders { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const length = request.httpProps[N.BLOB_CONTENT_LENGTH]; - if (request.entityType === EntityType.PageBlob) { - if (length && (length < 0 || length % 512 != 0)) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - } else { - if (length) { - throw new AError(ErrorCodes.UnsupportedHeader); - } - } + validate({ request = undefined }) { + const length = request.httpProps[N.BLOB_CONTENT_LENGTH]; + 
if (request.entityType === EntityType.PageBlob) { + if (length && (length < 0 || length % 512 != 0)) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + } else { + if (length) { + throw new AError(ErrorCodes.UnsupportedHeader); + } } + } } module.exports = new PutBlobHeaders(); diff --git a/lib/validation/blob/Range.js b/lib/validation/blob/Range.js index ed2a27ad2..b6d2e92bc 100644 --- a/lib/validation/blob/Range.js +++ b/lib/validation/blob/Range.js @@ -1,63 +1,68 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the range header (and headers depending on it) are valid. */ class Range { - constructor() { + constructor() {} + + validate({ request = undefined, blobProxy = undefined }) { + const range = request.httpProps[N.RANGE]; + const x_ms_range_get_content_md5 = + request.httpProps[N.RANGE_GET_CONTENT_MD5]; + // If this header is specified without the Range header, + // the service returns status code 400 (Bad Request). + // We are using raw 'range' string here since docs at + // https://docs.microsoft.com/de-de/azure/container-instances/container-instances-orchestrator-relationship + // do not mention x-ms-range header explictly + if ( + x_ms_range_get_content_md5 && + request.httpProps["range"] === undefined + ) { + throw new AError(ErrorCodes.InvalidHeaderValue); + } + // If this header is set to true _and_ the range exceeds 4 MB in size, + // the service returns status code 400 (Bad Request). 
+ if (x_ms_range_get_content_md5 && this._isRangeExceeded(range)) { + throw new AError(ErrorCodes.InvalidHeaderValue); } - validate({ request = undefined, blobProxy = undefined }) { - const range = request.httpProps[N.RANGE]; - const x_ms_range_get_content_md5 = request.httpProps[N.RANGE_GET_CONTENT_MD5]; - // If this header is specified without the Range header, - // the service returns status code 400 (Bad Request). - // We are using raw 'range' string here since docs at - // https://docs.microsoft.com/de-de/azure/container-instances/container-instances-orchestrator-relationship - // do not mention x-ms-range header explictly - if (x_ms_range_get_content_md5 && request.httpProps['range'] === undefined) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - // If this header is set to true _and_ the range exceeds 4 MB in size, - // the service returns status code 400 (Bad Request). - if (x_ms_range_get_content_md5 && this._isRangeExceeded(range)) { - throw new AError(ErrorCodes.InvalidHeaderValue); - } - - if (!this._withinRange(blobProxy.original.size, range)) { - throw new AError(ErrorCodes.InvalidRange); - } + if (!this._withinRange(blobProxy.original.size, range)) { + throw new AError(ErrorCodes.InvalidRange); } + } - // See http://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7233.html for spec of HTTP Byte Ranges - _withinRange(blobSize, range) { - if (range === undefined) { - return true; - } - const pair = range.split('=')[1].split('-'); - const startByte = parseInt(pair[0]); - return isNaN(startByte) || startByte < blobSize; + // See http://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7233.html for spec of HTTP Byte Ranges + _withinRange(blobSize, range) { + if (range === undefined) { + return true; } + const pair = range.split("=")[1].split("-"); + const startByte = parseInt(pair[0]); + return isNaN(startByte) || startByte < blobSize; + } - /* + /* * Checks whether the range is bigger than 4MB (which is not allowed when * x-ms-range-get-content-md5 is set 
to true ) * If there is invalid data in that string, function returns false * since boolean expression will contain at least one 'NaN'' in any invalid case. */ - _isRangeExceeded(range) { - if (range === undefined) { - return false; - } - const pair = range.split('=')[1].split('-'); - const startByte = parseInt(pair[0]); - const endByte = parseInt(pair[1]); - return endByte - startByte > 4194304; + _isRangeExceeded(range) { + if (range === undefined) { + return false; } + const pair = range.split("=")[1].split("-"); + const startByte = parseInt(pair[0]); + const endByte = parseInt(pair[1]); + return endByte - startByte > 4194304; + } } -module.exports = new Range(); \ No newline at end of file +module.exports = new Range(); diff --git a/lib/validation/blob/ServiceProperties.js b/lib/validation/blob/ServiceProperties.js index 80d3500b5..352cd5759 100644 --- a/lib/validation/blob/ServiceProperties.js +++ b/lib/validation/blob/ServiceProperties.js @@ -1,71 +1,80 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; -const allowedMethods = ['delete', 'get', 'head', 'merge', 'post', 'options', 'put']; +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); + +const allowedMethods = [ + "delete", + "get", + "head", + "merge", + "post", + "options", + "put", +]; class ServiceProperties { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const serviceProps = request.payload.StorageServiceProperties; + validate({ request = undefined }) { + const serviceProps = request.payload.StorageServiceProperties; - ////////////////////////// - // Validating CORS Rules - const rules = serviceProps.Cors ? serviceProps.Cors.CorsRule : []; + ////////////////////////// + // Validating CORS Rules + const rules = serviceProps.Cors ? 
serviceProps.Cors.CorsRule : []; - // A minimum of five rules can be stored - if (rules.length > 5) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } + // A minimum of five rules can be stored + if (rules.length > 5) { + throw new AError(ErrorCodes.InvalidXmlRequest); + } - for (const rule of rules) { - // These elements are required - if (!(rule.AllowedMethods && rule.AllowedHeaders && rule.ExposedHeaders)) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } - // Allowed Methods - rule.AllowedMethods.split(',') - .map((e) => { - return e.toLowerCase().replace(' ', ''); - }) - .forEach((e) => { - if (!allowedMethods.includes(e)) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } - }); + for (const rule of rules) { + // These elements are required + if ( + !(rule.AllowedMethods && rule.AllowedHeaders && rule.ExposedHeaders) + ) { + throw new AError(ErrorCodes.InvalidXmlRequest); + } + // Allowed Methods + rule.AllowedMethods.split(",") + .map((e) => { + return e.toLowerCase().replace(" ", ""); + }) + .forEach((e) => { + if (!allowedMethods.includes(e)) { + throw new AError(ErrorCodes.InvalidXmlRequest); + } + }); - // Allowed Headers - let numHeader = 0, - numPrefixHeader = 0; - rule.AllowedHeaders.split(',') - .forEach((e) => { - (e.includes(`\*`)) ? ++numPrefixHeader : ++numHeader; - if (numPrefixHeader > 2 || numHeader > 64) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } - if (e.length > 256) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } - }); + // Allowed Headers + let numHeader = 0, + numPrefixHeader = 0; + rule.AllowedHeaders.split(",").forEach((e) => { + e.includes(`\*`) ? ++numPrefixHeader : ++numHeader; + if (numPrefixHeader > 2 || numHeader > 64) { + throw new AError(ErrorCodes.InvalidXmlRequest); + } + if (e.length > 256) { + throw new AError(ErrorCodes.InvalidXmlRequest); + } + }); - // Exposed Headers - numHeader = 0; - numPrefixHeader = 0; - rule.ExposedHeaders.split(',') - .forEach((e) => { - (e.includes(`\*`)) ? 
++numPrefixHeader : ++numHeader; - if (numPrefixHeader > 2 || numHeader > 64) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } - if (e.length > 256) { - throw new AError(ErrorCodes.InvalidXmlRequest); - } - }); + // Exposed Headers + numHeader = 0; + numPrefixHeader = 0; + rule.ExposedHeaders.split(",").forEach((e) => { + e.includes(`\*`) ? ++numPrefixHeader : ++numHeader; + if (numPrefixHeader > 2 || numHeader > 64) { + throw new AError(ErrorCodes.InvalidXmlRequest); + } + if (e.length > 256) { + throw new AError(ErrorCodes.InvalidXmlRequest); } + }); } + } } -module.exports = new ServiceProperties(); \ No newline at end of file +module.exports = new ServiceProperties(); diff --git a/lib/validation/blob/ServiceSignature.js b/lib/validation/blob/ServiceSignature.js index 58fb0f511..c1cbc8ae2 100644 --- a/lib/validation/blob/ServiceSignature.js +++ b/lib/validation/blob/ServiceSignature.js @@ -1,63 +1,72 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /** - * Checks whether the operation is authorized by the service signature (if existing). - * + * Checks whether the operation is authorized by the service signature (if existing). 
+ * * @class ServiceSignature */ class ServiceSignature { - constructor() { + constructor() {} + + validate({ + request = undefined, + containerProxy = undefined, + blobProxy = undefined, + moduleOptions = undefined, + }) { + if (request.auth === undefined) { + // NOOP: No Service Signature signature was defined in the request + return; + } + + if (!request.auth.sasValid) { + throw new AError(ErrorCodes.AuthenticationFailed); + } + + const operation = moduleOptions.sasOperation, + accessPolicy = request.auth.accessPolicy, + resource = request.auth.resource; + + let start = undefined, + expiry = undefined, + permissions = undefined; + + if (request.auth.accessPolicy.id !== undefined) { + const si = + containerProxy.original.signedIdentifiers !== undefined + ? containerProxy.original.signedIdentifiers.SignedIdentifier.filter( + (i) => { + return i.Id === request.auth.accessPolicy.id; + } + )[0] + : undefined; + if (si === undefined) { + throw new AError(ErrorCodes.AuthenticationFailed); + } + start = Date.parse(si.AccessPolicy.Start); + expiry = Date.parse(si.AccessPolicy.Expiry); + permissions = si.AccessPolicy.Permission; + } else { + start = Date.parse(accessPolicy.start); // Possibly NaN + expiry = Date.parse(accessPolicy.expiry); // Possibly NaN + permissions = accessPolicy.permissions; + } + + // Time Validation + if (isNaN(expiry) || request.now < start || request.now > expiry) { + throw new AError(ErrorCodes.AuthenticationFailed); } - validate({ request = undefined, containerProxy = undefined, blobProxy = undefined, moduleOptions = undefined }) { - if (request.auth === undefined) { - // NOOP: No Service Signature signature was defined in the request - return; - } - - if (!request.auth.sasValid) { - throw new AError(ErrorCodes.AuthenticationFailed); - } - - const operation = moduleOptions.sasOperation, - accessPolicy = request.auth.accessPolicy, - resource = request.auth.resource; - - let start = undefined, - expiry = undefined, - permissions = undefined; - - 
if (request.auth.accessPolicy.id !== undefined) { - const si = (containerProxy.original.signedIdentifiers !== undefined) - ? containerProxy.original.signedIdentifiers.SignedIdentifier.filter((i) => { - return i.Id === request.auth.accessPolicy.id; - })[0] - : undefined; - if (si === undefined) { - throw new AError(ErrorCodes.AuthenticationFailed); - } - start = Date.parse(si.AccessPolicy.Start); - expiry = Date.parse(si.AccessPolicy.Expiry); - permissions = si.AccessPolicy.Permission; - } else { - start = Date.parse(accessPolicy.start); // Possibly NaN - expiry = Date.parse(accessPolicy.expiry); // Possibly NaN - permissions = accessPolicy.permissions; - } - - // Time Validation - if (isNaN(expiry) || request.now < start || request.now > expiry) { - throw new AError(ErrorCodes.AuthenticationFailed); - } - - // Permission Validation - if (!permissions.includes(operation)) { - throw new AError(ErrorCodes.AuthorizationPermissionMismatch); - } + // Permission Validation + if (!permissions.includes(operation)) { + throw new AError(ErrorCodes.AuthorizationPermissionMismatch); } + } } -module.exports = new ServiceSignature(); \ No newline at end of file +module.exports = new ServiceSignature(); diff --git a/lib/validation/blob/SupportedBlobType.js b/lib/validation/blob/SupportedBlobType.js index b2c68ed2d..85d8e9f28 100644 --- a/lib/validation/blob/SupportedBlobType.js +++ b/lib/validation/blob/SupportedBlobType.js @@ -1,20 +1,23 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - EntityType = require('./../../core/Constants').StorageEntityType; +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + EntityType = require("./../../core/Constants").StorageEntityType; class SupportedBlobType { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - if (request.entityType !== EntityType.AppendBlob 
&& - request.entityType !== EntityType.BlockBlob && - request.entityType !== EntityType.PageBlob) { - throw new AError(ErrorCodes.UnsupportedBlobType); - } + validate({ request = undefined }) { + if ( + request.entityType !== EntityType.AppendBlob && + request.entityType !== EntityType.BlockBlob && + request.entityType !== EntityType.PageBlob + ) { + throw new AError(ErrorCodes.UnsupportedBlobType); } + } } -module.exports = new SupportedBlobType(); \ No newline at end of file +module.exports = new SupportedBlobType(); diff --git a/lib/validation/blob/ValidationContext.js b/lib/validation/blob/ValidationContext.js index 00adeba44..addd66ab9 100644 --- a/lib/validation/blob/ValidationContext.js +++ b/lib/validation/blob/ValidationContext.js @@ -1,43 +1,56 @@ -'use strict'; +/** @format */ + +"use strict"; /** * The in-memory DB of Azurite serves as the exclusive source of truth for every validation. * Since the validation is synchronous / single-threaded we can be certain about the exact state of the entire * application before and after @see ValidationContext exits. - * + * * In case a validation fails an according @see AzuriteException is thrown which is then processed * by the validation middleware module middleware/blob/validation.js - * + * * @class ValidationContext */ class ValidationContext { - constructor({ request = undefined, containerProxy = undefined, blobProxy = undefined }) { - this.request = request; - this.containerProxy = containerProxy; - this.blobProxy = blobProxy; - } + constructor({ + request = undefined, + containerProxy = undefined, + blobProxy = undefined, + }) { + this.request = request; + this.containerProxy = containerProxy; + this.blobProxy = blobProxy; + } - /** - * Runs a validation module. - * - * @param {Object} valModule - * @param {Object} moduleOptions - allows a validation module to selectively add attributes or overwrite them - * @param {boolean} skip - if set to true validation module is not run. 
- * @returns this - * - * @memberOf ValidationContext - */ - run(valModule, moduleOptions, skip) { - if (skip) { - return this; - } - valModule.validate({ - request: moduleOptions ? moduleOptions.request || this.request : this.request, - containerProxy: moduleOptions ? moduleOptions.containerProxy || this.containerProxy : this.containerProxy, - blobProxy: moduleOptions ? moduleOptions.blobProxy || this.blobProxy : this.blobProxy, - moduleOptions: moduleOptions }); - return this; + /** + * Runs a validation module. + * + * @param {Object} valModule + * @param {Object} moduleOptions - allows a validation module to selectively add attributes or overwrite them + * @param {boolean} skip - if set to true validation module is not run. + * @returns this + * + * @memberOf ValidationContext + */ + run(valModule, moduleOptions, skip) { + if (skip) { + return this; } + valModule.validate({ + request: moduleOptions + ? moduleOptions.request || this.request + : this.request, + containerProxy: moduleOptions + ? moduleOptions.containerProxy || this.containerProxy + : this.containerProxy, + blobProxy: moduleOptions + ? 
moduleOptions.blobProxy || this.blobProxy + : this.blobProxy, + moduleOptions: moduleOptions, + }); + return this; + } } -module.exports = ValidationContext; \ No newline at end of file +module.exports = ValidationContext; diff --git a/lib/validation/queue/MessageExists.js b/lib/validation/queue/MessageExists.js index e25aaeee0..cbebd5b7b 100644 --- a/lib/validation/queue/MessageExists.js +++ b/lib/validation/queue/MessageExists.js @@ -1,21 +1,27 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - QueueManager = require('./../../core/queue/QueueManager'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + QueueManager = require("./../../core/queue/QueueManager"); /** * Validates whether the message with a given messageId exists. */ class MessageExists { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - if (QueueManager.getQueueAndMessage({ queueName: request.queueName, messageId: request.messageId }).message === undefined) { - throw new AError(ErrorCodes.MessageNotFound); - } + validate({ request = undefined }) { + if ( + QueueManager.getQueueAndMessage({ + queueName: request.queueName, + messageId: request.messageId, + }).message === undefined + ) { + throw new AError(ErrorCodes.MessageNotFound); } + } } -module.exports = new MessageExists(); \ No newline at end of file +module.exports = new MessageExists(); diff --git a/lib/validation/queue/MessageExpired.js b/lib/validation/queue/MessageExpired.js index 2e08301b5..85885db53 100644 --- a/lib/validation/queue/MessageExpired.js +++ b/lib/validation/queue/MessageExpired.js @@ -1,22 +1,26 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - QueueManager = require('./../../core/queue/QueueManager'); +"use strict"; + +const AError = 
require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + QueueManager = require("./../../core/queue/QueueManager"); /** * Validates whether the message is already expired. */ class PopReceipt { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const msg = QueueManager.getQueueAndMessage({ queueName: request.queueName, messageId: request.messageId }).message; - if (msg.expirationTime < request.now) { - throw new AError(ErrorCodes.MessageNotFound); - } + validate({ request = undefined }) { + const msg = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + messageId: request.messageId, + }).message; + if (msg.expirationTime < request.now) { + throw new AError(ErrorCodes.MessageNotFound); } + } } -module.exports = new PopReceipt(); \ No newline at end of file +module.exports = new PopReceipt(); diff --git a/lib/validation/queue/NumOfMessages.js b/lib/validation/queue/NumOfMessages.js index 13e76cfb9..00d0237c0 100644 --- a/lib/validation/queue/NumOfMessages.js +++ b/lib/validation/queue/NumOfMessages.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /** * Validates the correct number space of the number of messages query parameter 'numofmessages'. 
* Parameter must be a non-zero integer n with 1 <= n <= 32 */ class NumOfMessages { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - if (request.numOfMessages < 1 || request.numOfMessages > 32) { - throw new AError(ErrorCodes.OutOfRangeInput); - } + validate({ request = undefined }) { + if (request.numOfMessages < 1 || request.numOfMessages > 32) { + throw new AError(ErrorCodes.OutOfRangeInput); } + } } -module.exports = new NumOfMessages(); \ No newline at end of file +module.exports = new NumOfMessages(); diff --git a/lib/validation/queue/PopReceipt.js b/lib/validation/queue/PopReceipt.js index 48f3c1f31..f2fc59f48 100644 --- a/lib/validation/queue/PopReceipt.js +++ b/lib/validation/queue/PopReceipt.js @@ -1,22 +1,26 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'), - QueueManager = require('./../../core/queue/QueueManager'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"), + QueueManager = require("./../../core/queue/QueueManager"); /** * Validates whether popreceipt of a given message is still valid. 
*/ class PopReceipt { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - const msg = QueueManager.getQueueAndMessage({ queueName: request.queueName, messageId: request.messageId }).message; - if (msg.popReceipt !== request.popReceipt) { - throw new AError(ErrorCodes.PopReceiptMismatch); - } + validate({ request = undefined }) { + const msg = QueueManager.getQueueAndMessage({ + queueName: request.queueName, + messageId: request.messageId, + }).message; + if (msg.popReceipt !== request.popReceipt) { + throw new AError(ErrorCodes.PopReceiptMismatch); } + } } -module.exports = new PopReceipt(); \ No newline at end of file +module.exports = new PopReceipt(); diff --git a/lib/validation/queue/QueueCreation.js b/lib/validation/queue/QueueCreation.js index 9ae6d1cf5..36782d432 100644 --- a/lib/validation/queue/QueueCreation.js +++ b/lib/validation/queue/QueueCreation.js @@ -1,7 +1,9 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * When a queue with the specified name already exists, the Queue service checks the metadata @@ -10,20 +12,19 @@ const AError = require('./../../core/AzuriteError'), * See https://docs.microsoft.com/rest/api/storageservices/create-queue4 for details. 
*/ class QueueCreation { - constructor() { - } + constructor() {} - validate({ request = undefined, queue = undefined }) { - if (queue === undefined) { - return; - } - - Object.keys(queue.metaProps).forEach((prop) => { - if (queue.metaProps[prop] !== request.metaProps[prop]) { - throw new AError(ErrorCodes.QueueAlreadyExists); - } - }); + validate({ request = undefined, queue = undefined }) { + if (queue === undefined) { + return; } + + Object.keys(queue.metaProps).forEach((prop) => { + if (queue.metaProps[prop] !== request.metaProps[prop]) { + throw new AError(ErrorCodes.QueueAlreadyExists); + } + }); + } } -module.exports = new QueueCreation; \ No newline at end of file +module.exports = new QueueCreation(); diff --git a/lib/validation/queue/QueueExists.js b/lib/validation/queue/QueueExists.js index 3e3fce049..6981ab633 100644 --- a/lib/validation/queue/QueueExists.js +++ b/lib/validation/queue/QueueExists.js @@ -1,18 +1,18 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class QueueExists { - constructor() { - } + constructor() {} - validate({ request = undefined, queue = undefined }) { - if (queue === undefined) { - throw new AError(ErrorCodes.QueueNotFound); - } + validate({ request = undefined, queue = undefined }) { + if (queue === undefined) { + throw new AError(ErrorCodes.QueueNotFound); } + } } -module.exports = new QueueExists; \ No newline at end of file +module.exports = new QueueExists(); diff --git a/lib/validation/queue/QueueMessageSize.js b/lib/validation/queue/QueueMessageSize.js index 250917b06..b627f392a 100644 --- a/lib/validation/queue/QueueMessageSize.js +++ b/lib/validation/queue/QueueMessageSize.js @@ -1,21 +1,22 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = 
require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the queue name adheres to the naming convention * as specified at https://docs.microsoft.com/en-us/rest/api/storageservices/naming-queues-and-metadata */ class QueueMessageSize { - constructor() { - } + constructor() {} - validate({ request = undefined }) { - if (request.bodyLength > 64000) { - throw new AError(ErrorCodes.MessageTooLarge); - } + validate({ request = undefined }) { + if (request.bodyLength > 64000) { + throw new AError(ErrorCodes.MessageTooLarge); } + } } -module.exports = new QueueMessageSize \ No newline at end of file +module.exports = new QueueMessageSize(); diff --git a/lib/validation/queue/QueueName.js b/lib/validation/queue/QueueName.js index c34750279..a9797ae21 100644 --- a/lib/validation/queue/QueueName.js +++ b/lib/validation/queue/QueueName.js @@ -1,26 +1,27 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the queue name adheres to the naming convention * as specified at https://docs.microsoft.com/en-us/rest/api/storageservices/naming-queues-and-metadata */ class QueueName { - constructor() { + constructor() {} + + validate({ request = undefined }) { + const name = request.queueName; + if (name.length < 3 || name.length > 63) { + throw new AError(ErrorCodes.OutOfRangeInput); } - validate({ request = undefined }) { - const name = request.queueName; - if (name.length < 3 || name.length > 63) { - throw new AError(ErrorCodes.OutOfRangeInput); - } - - if (/^([a-z0-9]+)(-[a-z0-9]+)*$/i.test(name) === false) { - throw new AError(ErrorCodes.InvalidInput); - } + if (/^([a-z0-9]+)(-[a-z0-9]+)*$/i.test(name) === false) { + throw new 
AError(ErrorCodes.InvalidInput); } + } } -module.exports = new QueueName; \ No newline at end of file +module.exports = new QueueName(); diff --git a/lib/validation/queue/ValidationContext.js b/lib/validation/queue/ValidationContext.js index 9d7d5ec95..fbca5906c 100644 --- a/lib/validation/queue/ValidationContext.js +++ b/lib/validation/queue/ValidationContext.js @@ -1,45 +1,58 @@ -'use strict'; +/** @format */ + +"use strict"; /** * The state of all queues and messages is kept in memory only. * Since the validation is synchronous / single-threaded we can be certain about the exact state of the entire * application before and after @see ValidationContext exits. - * + * * In case a validation fails an according @see AzuriteException is thrown which is then processed * by the validation middleware module middleware/queue/validation.js - * + * * @class ValidationContext */ class ValidationContext { - constructor({ request = undefined, queue = undefined, message = undefined, operation = undefined }) { - this.request = request; - this.queue = queue; - this.message = message, - this.operation = operation; - } + constructor({ + request = undefined, + queue = undefined, + message = undefined, + operation = undefined, + }) { + this.request = request; + this.queue = queue; + (this.message = message), (this.operation = operation); + } - /** - * Runs a validation module. - * - * @param {Object} valModule - * @param {Object} moduleOptions - allows a validation module to selectively add attributes or overwrite them - * @param {boolean} skip - if set to true validation module is not run. - * @returns this - * - * @memberOf ValidationContext - */ - run(valModule, moduleOptions, skip) { - if (skip) { - return this; - } - valModule.validate({ - request: moduleOptions ? moduleOptions.request || this.request : this.request, - queue: moduleOptions ? moduleOptions.queue || this.queue : this.queue, - message: moduleOptions ? 
moduleOptions.message || this.message : this.message, - operation: moduleOptions ? moduleOptions.operation || this.operation : this.operation, - moduleOptions: moduleOptions }); - return this; + /** + * Runs a validation module. + * + * @param {Object} valModule + * @param {Object} moduleOptions - allows a validation module to selectively add attributes or overwrite them + * @param {boolean} skip - if set to true validation module is not run. + * @returns this + * + * @memberOf ValidationContext + */ + run(valModule, moduleOptions, skip) { + if (skip) { + return this; } + valModule.validate({ + request: moduleOptions + ? moduleOptions.request || this.request + : this.request, + queue: moduleOptions ? moduleOptions.queue || this.queue : this.queue, + message: moduleOptions + ? moduleOptions.message || this.message + : this.message, + operation: moduleOptions + ? moduleOptions.operation || this.operation + : this.operation, + moduleOptions: moduleOptions, + }); + return this; + } } -module.exports = ValidationContext; \ No newline at end of file +module.exports = ValidationContext; diff --git a/lib/validation/queue/VisibilityTimeoutValue.js b/lib/validation/queue/VisibilityTimeoutValue.js index 8ef7bbbf8..8c2cf0ef6 100644 --- a/lib/validation/queue/VisibilityTimeoutValue.js +++ b/lib/validation/queue/VisibilityTimeoutValue.js @@ -1,8 +1,10 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - Operations = require('./../../core/Constants').Operations.Queue, - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + Operations = require("./../../core/Constants").Operations.Queue, + ErrorCodes = require("./../../core/ErrorCodes"); /* * Checks whether the visibility timeout value adheres to the specifications at @@ -10,32 +12,42 @@ const AError = require('./../../core/AzuriteError'), * and https://docs.microsoft.com/en-us/rest/api/storageservices/get-messages */ class 
VisibilityTimeoutValue { - constructor() { - } + constructor() {} - validate({ request = undefined, operation = undefined, message = undefined }) { - if (operation === Operations.GET_MESSAGE) { - if (request.visibilityTimeout < 1 || request.visibilityTimeout > 60 * 60 * 24 * 7) { - throw new AError(ErrorCodes.OutOfRangeInput); - } + validate({ + request = undefined, + operation = undefined, + message = undefined, + }) { + if (operation === Operations.GET_MESSAGE) { + if ( + request.visibilityTimeout < 1 || + request.visibilityTimeout > 60 * 60 * 24 * 7 + ) { + throw new AError(ErrorCodes.OutOfRangeInput); + } + } else { + if ( + request.visibilityTimeout < 0 || + request.visibilityTimeout > 60 * 60 * 24 * 7 + ) { + throw new AError(ErrorCodes.OutOfRangeInput); + } + if (operation === Operations.PUT_MESSAGE) { + if ( + request.now + request.visibilityTimeout > + request.now + request.messageTtl + ) { + throw new AError(ErrorCodes.OutOfRangeInput); } - else { - if (request.visibilityTimeout < 0 || request.visibilityTimeout > 60 * 60 * 24 * 7) { - throw new AError(ErrorCodes.OutOfRangeInput); - } - if (operation === Operations.PUT_MESSAGE) { - if (request.now + request.visibilityTimeout > request.now + request.messageTtl) { - throw new AError(ErrorCodes.OutOfRangeInput); - } - } - if (operation === Operations.UPDATE_MESSAGE) { - if (request.now + request.visibilityTimeout > message.expirationTime) { - throw new AError(ErrorCodes.OutOfRangeInput); - } - } - + } + if (operation === Operations.UPDATE_MESSAGE) { + if (request.now + request.visibilityTimeout > message.expirationTime) { + throw new AError(ErrorCodes.OutOfRangeInput); } + } } + } } -module.exports = new VisibilityTimeoutValue(); \ No newline at end of file +module.exports = new VisibilityTimeoutValue(); diff --git a/lib/validation/table/ConflictingEntity.js b/lib/validation/table/ConflictingEntity.js index 2cfdcd752..f569590c0 100644 --- a/lib/validation/table/ConflictingEntity.js +++ 
b/lib/validation/table/ConflictingEntity.js @@ -1,17 +1,18 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class ConflictingEntity { - constructor() { - } + constructor() {} - validate({ entity = undefined }) { - if (entity !== undefined) { - throw new AError(ErrorCodes.EntityAlreadyExists); - } + validate({ entity = undefined }) { + if (entity !== undefined) { + throw new AError(ErrorCodes.EntityAlreadyExists); } + } } -module.exports = new ConflictingEntity; \ No newline at end of file +module.exports = new ConflictingEntity(); diff --git a/lib/validation/table/ConflictingTable.js b/lib/validation/table/ConflictingTable.js index a4ad967f8..6fdfe5a66 100644 --- a/lib/validation/table/ConflictingTable.js +++ b/lib/validation/table/ConflictingTable.js @@ -1,17 +1,18 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class ConflictingTable { - constructor() { - } + constructor() {} - validate({ table = undefined }) { - if (table !== undefined) { - throw new AError(ErrorCodes.TableAlreadyExists); - } + validate({ table = undefined }) { + if (table !== undefined) { + throw new AError(ErrorCodes.TableAlreadyExists); } + } } -module.exports = new ConflictingTable; \ No newline at end of file +module.exports = new ConflictingTable(); diff --git a/lib/validation/table/EntityExists.js b/lib/validation/table/EntityExists.js index df82dc38f..d644b01e2 100644 --- a/lib/validation/table/EntityExists.js +++ b/lib/validation/table/EntityExists.js @@ -1,17 +1,18 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = 
require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class EntityExists { - constructor() { - } + constructor() {} - validate({ entity = undefined }) { - if (entity === undefined) { - throw new AError(ErrorCodes.ResourceNotFound); - } + validate({ entity = undefined }) { + if (entity === undefined) { + throw new AError(ErrorCodes.ResourceNotFound); } + } } -module.exports = new EntityExists; \ No newline at end of file +module.exports = new EntityExists(); diff --git a/lib/validation/table/EntityIfMatch.js b/lib/validation/table/EntityIfMatch.js index 393232e79..97846fde9 100644 --- a/lib/validation/table/EntityIfMatch.js +++ b/lib/validation/table/EntityIfMatch.js @@ -1,24 +1,25 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - N = require('./../../core/HttpHeaderNames'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + N = require("./../../core/HttpHeaderNames"), + ErrorCodes = require("./../../core/ErrorCodes"); class EntityIfMatch { - constructor() { - } + constructor() {} - validate({ request = undefined, entity = undefined }) { - if (request.httpProps[N.IF_MATCH] === undefined) { - throw new AError(ErrorCodes.MissingRequiredHeader); - } - if (request.httpProps[N.IF_MATCH] === '*') { - return; - } - if (request.httpProps[N.IF_MATCH] !== entity._.etag) { - throw new AError(ErrorCodes.UpdateConditionNotSatisfied); - } + validate({ request = undefined, entity = undefined }) { + if (request.httpProps[N.IF_MATCH] === undefined) { + throw new AError(ErrorCodes.MissingRequiredHeader); + } + if (request.httpProps[N.IF_MATCH] === "*") { + return; + } + if (request.httpProps[N.IF_MATCH] !== entity._.etag) { + throw new AError(ErrorCodes.UpdateConditionNotSatisfied); } + } } -module.exports = new EntityIfMatch; \ No newline at end of file +module.exports 
= new EntityIfMatch(); diff --git a/lib/validation/table/TableExists.js b/lib/validation/table/TableExists.js index 2f14ba1de..df68f92cb 100644 --- a/lib/validation/table/TableExists.js +++ b/lib/validation/table/TableExists.js @@ -1,17 +1,18 @@ -'use strict'; +/** @format */ -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +"use strict"; + +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class TableExists { - constructor() { - } + constructor() {} - validate({ request = undefined, table = undefined }) { - if (request.tableName !== undefined && table === undefined) { - throw new AError(ErrorCodes.TableNotFound); - } + validate({ request = undefined, table = undefined }) { + if (request.tableName !== undefined && table === undefined) { + throw new AError(ErrorCodes.TableNotFound); } + } } -module.exports = new TableExists; \ No newline at end of file +module.exports = new TableExists(); diff --git a/lib/validation/table/TableName.js b/lib/validation/table/TableName.js index 1f70a9aad..69182a53b 100644 --- a/lib/validation/table/TableName.js +++ b/lib/validation/table/TableName.js @@ -1,26 +1,25 @@ +/** @format */ +"use strict"; -'use strict'; - -const AError = require('./../../core/AzuriteError'), - ErrorCodes = require('./../../core/ErrorCodes'); +const AError = require("./../../core/AzuriteError"), + ErrorCodes = require("./../../core/ErrorCodes"); class TableName { - constructor() { - } + constructor() {} - validate({ table = undefined }) { - if (table === undefined) { - return; - } + validate({ table = undefined }) { + if (table === undefined) { + return; + } - if (/^tables$/i.test(table.name)) { - throw new AError(ErrorCodes.ReservedTableName); - } - if (/[A-Za-z][A-Za-z0-9]{2,62}/i.test(table.name) === false) { - throw new AError(ErrorCodes.InvalidInput); - } + if (/^tables$/i.test(table.name)) { + throw new AError(ErrorCodes.ReservedTableName); + } 
+ if (/[A-Za-z][A-Za-z0-9]{2,62}/i.test(table.name) === false) { + throw new AError(ErrorCodes.InvalidInput); } + } } -module.exports = new TableName; \ No newline at end of file +module.exports = new TableName(); diff --git a/lib/validation/table/ValidationContext.js b/lib/validation/table/ValidationContext.js index 173b22d21..fa0456eac 100644 --- a/lib/validation/table/ValidationContext.js +++ b/lib/validation/table/ValidationContext.js @@ -1,43 +1,48 @@ -'use strict'; +/** @format */ + +"use strict"; /** * The in-memory DB of Azurite serves as the exclusive source of truth for every validation. * Since the validation is synchronous / single-threaded we can be certain about the exact state of the entire * application before and after @see ValidationContext exits. - * + * * In case a validation fails an according @see AzuriteException is thrown which is then processed * by the validation middleware module middleware/table/validation.js - * + * * @class ValidationContext */ class ValidationContext { - constructor({ request = undefined, table = undefined, entity = undefined }) { - this.request = request; - this.table = table; - this.entity = entity; - } + constructor({ request = undefined, table = undefined, entity = undefined }) { + this.request = request; + this.table = table; + this.entity = entity; + } - /** - * Runs a validation module. - * - * @param {Object} valModule - * @param {Object} moduleOptions - allows a validation module to selectively add attributes or overwrite them - * @param {boolean} skip - if set to true validation module is not run. - * @returns this - * - * @memberOf ValidationContext - */ - run(valModule, moduleOptions, skip) { - if (skip) { - return this; - } - valModule.validate({ - request: moduleOptions ? moduleOptions.request || this.request : this.request, - table: moduleOptions ? moduleOptions.table || this.table : this.table, - entity: moduleOptions ? 
moduleOptions.entity || this.entity : this.entity, - moduleOptions: moduleOptions }); - return this; + /** + * Runs a validation module. + * + * @param {Object} valModule + * @param {Object} moduleOptions - allows a validation module to selectively add attributes or overwrite them + * @param {boolean} skip - if set to true validation module is not run. + * @returns this + * + * @memberOf ValidationContext + */ + run(valModule, moduleOptions, skip) { + if (skip) { + return this; } + valModule.validate({ + request: moduleOptions + ? moduleOptions.request || this.request + : this.request, + table: moduleOptions ? moduleOptions.table || this.table : this.table, + entity: moduleOptions ? moduleOptions.entity || this.entity : this.entity, + moduleOptions: moduleOptions, + }); + return this; + } } -module.exports = ValidationContext; \ No newline at end of file +module.exports = ValidationContext; diff --git a/lib/xml/Serializers.js b/lib/xml/Serializers.js index 4690a31a6..5e1b6647a 100644 --- a/lib/xml/Serializers.js +++ b/lib/xml/Serializers.js @@ -1,96 +1,120 @@ -const BbPromise = require('bluebird'), - xml2js = require('xml2js'), - SignedIdentifiers = require('./SignedIdentifierXmlModel'), - AError = require('./../core/AzuriteError'), - parseStringAsync = BbPromise.promisify(new xml2js.Parser({ explicitArray: true }).parseString), - parseStringAsyncNoArray = BbPromise.promisify(new xml2js.Parser({ explicitArray: false }).parseString), - xml2jsAsync = BbPromise.promisify(require('xml2js').parseString); +/** @format */ + +const BbPromise = require("bluebird"), + xml2js = require("xml2js"), + SignedIdentifiers = require("./SignedIdentifierXmlModel"), + AError = require("./../core/AzuriteError"), + parseStringAsync = BbPromise.promisify( + new xml2js.Parser({ explicitArray: true }).parseString + ), + parseStringAsyncNoArray = BbPromise.promisify( + new xml2js.Parser({ explicitArray: false }).parseString + ), + xml2jsAsync = 
BbPromise.promisify(require("xml2js").parseString); // see https://docs.microsoft.com/en-us/rest/api/storageservices/Set-Container-ACL?redirectedfrom=MSDN exports.parseSignedIdentifiers = (body) => { - body = body.toString('utf8'); - return parseStringAsync(body) - .then((temp) => { - if (temp === null) { - return null; - } - const model = new SignedIdentifiers(); - if (temp.SignedIdentifiers !== "") { - for (const si of temp.SignedIdentifiers.SignedIdentifier) { - let start; - let expiry; - let permission; - // for case where expiry not defined initially just avoiding a PANIC - if(typeof si.AccessPolicy != 'undefined'){ - if(typeof si.AccessPolicy.Start != 'undefined'){ - start = si.AccessPolicy.Start[0]; - } - else{ - console.log(new Date().toISOString(), ' INFO ACCESS_POLICY_START_UNDEFINED \"', si , '\"'); - } + body = body.toString("utf8"); + return parseStringAsync(body).then((temp) => { + if (temp === null) { + return null; + } + const model = new SignedIdentifiers(); + if (temp.SignedIdentifiers !== "") { + for (const si of temp.SignedIdentifiers.SignedIdentifier) { + let start; + let expiry; + let permission; + // for case where expiry not defined initially just avoiding a PANIC + if (typeof si.AccessPolicy != "undefined") { + if (typeof si.AccessPolicy.Start != "undefined") { + start = si.AccessPolicy.Start[0]; + } else { + console.log( + new Date().toISOString(), + ' INFO ACCESS_POLICY_START_UNDEFINED "', + si, + '"' + ); + } - if(typeof si.AccessPolicy.Expiry != 'undefined'){ - expiry = si.AccessPolicy.Expiry[0]; - } - else{ - // if you have no expiry set on your SAS Key, you are doing something wrong - console.log(new Date().toISOString(), ' ERROR ACCESS_POLICY_EXPIRY_UNDEFINED \"', si, '\"'); - let MAX_TIMESTAMP = 8640000000000000; - expiry = new Date(MAX_TIMESTAMP).toISOString(); - } + if (typeof si.AccessPolicy.Expiry != "undefined") { + expiry = si.AccessPolicy.Expiry[0]; + } else { + // if you have no expiry set on your SAS Key, you are doing 
something wrong + console.log( + new Date().toISOString(), + ' ERROR ACCESS_POLICY_EXPIRY_UNDEFINED "', + si, + '"' + ); + let MAX_TIMESTAMP = 8640000000000000; + expiry = new Date(MAX_TIMESTAMP).toISOString(); + } - if(typeof si.AccessPolicy.Permission != 'undefined'){ - expiry = si.AccessPolicy.Permission[0]; - } - model.addSignedIdentifier(si.Id[0], start, expiry,permission); - } - else - { - // as Azurite is a tool to aid development, we should notify developers that a mistake has been made - console.log(new Date().toISOString(), ' ERROR ACCESS_POLICY_UNDEFINED \"', si,'\"'); - } - - } - } - return model; - }); -} + if (typeof si.AccessPolicy.Permission != "undefined") { + expiry = si.AccessPolicy.Permission[0]; + } + model.addSignedIdentifier(si.Id[0], start, expiry, permission); + } else { + // as Azurite is a tool to aid development, we should notify developers that a mistake has been made + console.log( + new Date().toISOString(), + ' ERROR ACCESS_POLICY_UNDEFINED "', + si, + '"' + ); + } + } + } + return model; + }); +}; exports.deserializeBlockList = (body) => { - const txt = body.toString('utf8'); - return xml2jsAsync(txt) - .then((result) => { - let blockIds = []; - Object.keys(result.BlockList).forEach((type) => { - result.BlockList[type].forEach((id) => { - blockIds.push({ - type: type, - id: id - }); - }); - }); - return blockIds; - }) - .catch((err) => { - throw new AError('Invalid XML.', 400, 'One of the XML nodes specified in the request body is not supported.'); + const txt = body.toString("utf8"); + return xml2jsAsync(txt) + .then((result) => { + let blockIds = []; + Object.keys(result.BlockList).forEach((type) => { + result.BlockList[type].forEach((id) => { + blockIds.push({ + type: type, + id: id, + }); }); -} + }); + return blockIds; + }) + .catch((err) => { + throw new AError( + "Invalid XML.", + 400, + "One of the XML nodes specified in the request body is not supported." 
+ ); + }); +}; exports.parseServiceProperties = (body) => { - const xml = body.toString('utf8'); - return parseStringAsyncNoArray(xml) - .then((result) => { - if (result.StorageServiceProperties.Cors !== undefined && - result.StorageServiceProperties.Cors.CorsRule !== undefined && - !(result.StorageServiceProperties.Cors.CorsRule instanceof Array)) { - - const rule = result.StorageServiceProperties.Cors.CorsRule; - result.StorageServiceProperties.Cors.CorsRule = []; - result.StorageServiceProperties.Cors.CorsRule.push(rule); - } - return result; - }) - .catch((err) => { - throw new AError('Invalid XML.', 400, 'One of the XML nodes specified in the request body is not supported.'); - }); -} \ No newline at end of file + const xml = body.toString("utf8"); + return parseStringAsyncNoArray(xml) + .then((result) => { + if ( + result.StorageServiceProperties.Cors !== undefined && + result.StorageServiceProperties.Cors.CorsRule !== undefined && + !(result.StorageServiceProperties.Cors.CorsRule instanceof Array) + ) { + const rule = result.StorageServiceProperties.Cors.CorsRule; + result.StorageServiceProperties.Cors.CorsRule = []; + result.StorageServiceProperties.Cors.CorsRule.push(rule); + } + return result; + }) + .catch((err) => { + throw new AError( + "Invalid XML.", + 400, + "One of the XML nodes specified in the request body is not supported." 
+ ); + }); +}; diff --git a/lib/xml/SignedIdentifierXmlModel.js b/lib/xml/SignedIdentifierXmlModel.js index 358672b2a..713c973cf 100644 --- a/lib/xml/SignedIdentifierXmlModel.js +++ b/lib/xml/SignedIdentifierXmlModel.js @@ -1,20 +1,22 @@ -'use strict'; +/** @format */ + +"use strict"; class SignedIdentifiers { - constructor() { - this.SignedIdentifier = []; - } + constructor() { + this.SignedIdentifier = []; + } - addSignedIdentifier(id, start, expiry, permissionlist) { - this.SignedIdentifier.push({ - Id: id, - AccessPolicy: { - Start: start, - Expiry: expiry, - Permission: permissionlist - } - }); - } + addSignedIdentifier(id, start, expiry, permissionlist) { + this.SignedIdentifier.push({ + Id: id, + AccessPolicy: { + Start: start, + Expiry: expiry, + Permission: permissionlist, + }, + }); + } } -module.exports = SignedIdentifiers; \ No newline at end of file +module.exports = SignedIdentifiers; diff --git a/lib/xml/blob/BlobListXmlModel.js b/lib/xml/blob/BlobListXmlModel.js index 2305911d9..4c4302125 100644 --- a/lib/xml/blob/BlobListXmlModel.js +++ b/lib/xml/blob/BlobListXmlModel.js @@ -1,65 +1,67 @@ -'use strict'; +/** @format */ + +"use strict"; /* * These classes are used as model for XML-Serialization in the "ListBlobs" API. 
*/ class BlobList { - constructor() { - this.Prefix = ''; - this.Marker = ''; - this.MaxResults = ''; - this.Delimiter = ''; - this.Blobs = { - Blob: [], - BlobPrefix: '' - }; - this.NextMarker = {}; - } + constructor() { + this.Prefix = ""; + this.Marker = ""; + this.MaxResults = ""; + this.Delimiter = ""; + this.Blobs = { + Blob: [], + BlobPrefix: "", + }; + this.NextMarker = {}; + } } class Blob { - constructor(name, blobType) { - this.Name = name; - this.Snapshot; - this.Properties = new Properties(blobType); - this.Metadata = {}; - } + constructor(name, blobType) { + this.Name = name; + this.Snapshot; + this.Properties = new Properties(blobType); + this.Metadata = {}; + } } class Properties { - constructor(blobType) { - this['Last-Modified']; - this.ETag; - this['Content-Length']; - this['Content-Type']; - this['Content-Encoding']; - this['Content-Language']; - this['Content-MD5']; - this['Cache-Control']; - this.BlobType = blobType; - this.LeaseStatus = 'unlocked'; - this.LeaseState = 'available'; - this.LeaseDuration = 'infinite'; - this.ServerEncrypted = false; - this.CopyId; - this.CopyStatus; - this.CopySource; - this.CopyProgress; - this.CopyCompletionTime; - this.CopyStatusDescription; - } + constructor(blobType) { + this["Last-Modified"]; + this.ETag; + this["Content-Length"]; + this["Content-Type"]; + this["Content-Encoding"]; + this["Content-Language"]; + this["Content-MD5"]; + this["Cache-Control"]; + this.BlobType = blobType; + this.LeaseStatus = "unlocked"; + this.LeaseState = "available"; + this.LeaseDuration = "infinite"; + this.ServerEncrypted = false; + this.CopyId; + this.CopyStatus; + this.CopySource; + this.CopyProgress; + this.CopyCompletionTime; + this.CopyStatusDescription; + } } function blobPrefixesToXml(blobPrefixes) { - let xml = ''; - for (const prefix of blobPrefixes) { - xml += `${prefix}`; - } - return xml; + let xml = ""; + for (const prefix of blobPrefixes) { + xml += `${prefix}`; + } + return xml; } module.exports = { - 
BlobList: BlobList, - Blob: Blob, - blobPrefixesToXml: blobPrefixesToXml -} \ No newline at end of file + BlobList: BlobList, + Blob: Blob, + blobPrefixesToXml: blobPrefixesToXml, +}; diff --git a/lib/xml/blob/BlockListXmlModel.js b/lib/xml/blob/BlockListXmlModel.js index a08b08907..e6203e769 100644 --- a/lib/xml/blob/BlockListXmlModel.js +++ b/lib/xml/blob/BlockListXmlModel.js @@ -1,33 +1,35 @@ -'use strict'; +/** @format */ -const BlockListType = require('./../../core/Constants').BlockListType; +"use strict"; + +const BlockListType = require("./../../core/Constants").BlockListType; class BlockList { - constructor(blockListType) { - this.CommittedBlocks = { - Block: [] - } - this.UncommittedBlocks = { - Block: [] - } + constructor(blockListType) { + this.CommittedBlocks = { + Block: [], + }; + this.UncommittedBlocks = { + Block: [], + }; - if (blockListType === BlockListType.COMMITTED) { - delete this.UncommittedBlocks; - } - if (blockListType === BlockListType.UNCOMMITTED) { - delete this.CommittedBlocks; - } + if (blockListType === BlockListType.COMMITTED) { + delete this.UncommittedBlocks; + } + if (blockListType === BlockListType.UNCOMMITTED) { + delete this.CommittedBlocks; } + } } class Block { - constructor(name, size) { - this.Name = name; - this.Size = size; - } + constructor(name, size) { + this.Name = name; + this.Size = size; + } } module.exports = { - BlockList: BlockList, - Block: Block -} \ No newline at end of file + BlockList: BlockList, + Block: Block, +}; diff --git a/lib/xml/blob/ContainerListXmlModel.js b/lib/xml/blob/ContainerListXmlModel.js index 21231c053..bf15b422b 100644 --- a/lib/xml/blob/ContainerListXmlModel.js +++ b/lib/xml/blob/ContainerListXmlModel.js @@ -1,39 +1,41 @@ -'use strict'; +/** @format */ + +"use strict"; /* * These classes are used as model for XML-Serialization in the "ListContainer" API. 
*/ class ContainerList { - constructor() { - this.Prefix = ''; - this.Marker = ''; - this.MaxResults = ''; - this.Containers = { - Container: [] - } - this.NextMarker; - } + constructor() { + this.Prefix = ""; + this.Marker = ""; + this.MaxResults = ""; + this.Containers = { + Container: [], + }; + this.NextMarker; + } } class Container { - constructor(name) { - this.Name = name || ''; - this.Properties = new Properties(); - this.Metadata = {}; - } + constructor(name) { + this.Name = name || ""; + this.Properties = new Properties(); + this.Metadata = {}; + } } class Properties { - constructor() { - this['Last-Modified']; - this.ETag; - this.LeaseStatus = 'unlocked'; - this.LeaseState = 'available'; - this.LeaseDuration = 'infinite'; - } + constructor() { + this["Last-Modified"]; + this.ETag; + this.LeaseStatus = "unlocked"; + this.LeaseState = "available"; + this.LeaseDuration = "infinite"; + } } module.exports = { - ContainerList: ContainerList, - Container: Container -} \ No newline at end of file + ContainerList: ContainerList, + Container: Container, +}; diff --git a/lib/xml/blob/PageListXmlModel.js b/lib/xml/blob/PageListXmlModel.js index 7115b546a..56888b380 100644 --- a/lib/xml/blob/PageListXmlModel.js +++ b/lib/xml/blob/PageListXmlModel.js @@ -1,6 +1,8 @@ -'use strict'; +/** @format */ -const os = require('os'); +"use strict"; + +const os = require("os"); /* * The serialization model for GET PageRanges. @@ -9,44 +11,50 @@ const os = require('os'); * See https://docs.microsoft.com/en-us/rest/api/storageservices/fileservices/get-page-ranges for details on the schema. */ class PageListModel { - constructor() { - this.items = []; - } - - addPageRange(startByte, endByte) { - this.items.push(new PageRange(startByte, endByte)); - } - - addClearRange(startByte, endByte) { - this.items.push(new ClearRange(startByte, endByte)); - } - - toString() { - let out = `` + os.EOL; - out += '' + os.EOL; - for (let item of this.items) { - out += (item instanceof PageRange) ? 
'' + os.EOL : '' + os.EOL; - out += `${item.start}` + os.EOL; - out += `${item.end}` +os.EOL; - out += (item instanceof PageRange) ? '' + os.EOL : '' + os.EOL; - } - out += ''; - return out; + constructor() { + this.items = []; + } + + addPageRange(startByte, endByte) { + this.items.push(new PageRange(startByte, endByte)); + } + + addClearRange(startByte, endByte) { + this.items.push(new ClearRange(startByte, endByte)); + } + + toString() { + let out = `` + os.EOL; + out += "" + os.EOL; + for (let item of this.items) { + out += + item instanceof PageRange + ? "" + os.EOL + : "" + os.EOL; + out += `${item.start}` + os.EOL; + out += `${item.end}` + os.EOL; + out += + item instanceof PageRange + ? "" + os.EOL + : "" + os.EOL; } + out += ""; + return out; + } } class PageRange { - constructor(startByte, endByte) { - this.start = startByte; - this.end = endByte; - } + constructor(startByte, endByte) { + this.start = startByte; + this.end = endByte; + } } class ClearRange { - constructor(startByte, endByte) { - this.start = startByte; - this.end = endByte; - } + constructor(startByte, endByte) { + this.start = startByte; + this.end = endByte; + } } -module.exports = PageListModel; \ No newline at end of file +module.exports = PageListModel; diff --git a/lib/xml/queue/QueueList.js b/lib/xml/queue/QueueList.js index f3ca6f112..f2645cd94 100644 --- a/lib/xml/queue/QueueList.js +++ b/lib/xml/queue/QueueList.js @@ -1,55 +1,63 @@ -'use strict'; +/** @format */ -const js2xml = require('js2xmlparser'); +"use strict"; + +const js2xml = require("js2xmlparser"); /* * These classes are used as model for XML-Serialization in the "ListQueues" API * as specified at https://docs.microsoft.com/en-us/rest/api/storageservices/list-queues1 */ class QueueList { - constructor() { - this.Prefix = undefined; - this.Marker = undefined - this.MaxResults = undefined; - this.Queues = { - Queue: [] - } - this.NextMarker = {}; // this will be converted to by js2xmlparser - } + constructor() { + 
this.Prefix = undefined; + this.Marker = undefined; + this.MaxResults = undefined; + this.Queues = { + Queue: [], + }; + this.NextMarker = {}; // this will be converted to by js2xmlparser + } - add(queue) { - this.Queues.Queue.push(queue); - } + add(queue) { + this.Queues.Queue.push(queue); + } - toXml() { - if (this.Prefix === undefined) { - delete this.Prefix; - } - if (this.Marker === undefined) { - delete this.Marker; - } - if (this.MaxResults === undefined) { - delete this.MaxResults; - } - let xml = js2xml.parse('EnumerationResults', this); - xml = xml.replace(``, ``); - xml = xml.replace(``, ``); - xml = xml.replace(/\>[\s]+\<'); - return xml; + toXml() { + if (this.Prefix === undefined) { + delete this.Prefix; + } + if (this.Marker === undefined) { + delete this.Marker; } + if (this.MaxResults === undefined) { + delete this.MaxResults; + } + let xml = js2xml.parse("EnumerationResults", this); + xml = xml.replace( + ``, + `` + ); + xml = xml.replace( + ``, + `` + ); + xml = xml.replace(/\>[\s]+\<"); + return xml; + } } class Queue { - constructor(name) { - this.Name = name; - } + constructor(name) { + this.Name = name; + } - addMetadata(metaProps) { - this.Metadata = metaProps; - } + addMetadata(metaProps) { + this.Metadata = metaProps; + } } module.exports = { - QueueList: QueueList, - Queue: Queue -} \ No newline at end of file + QueueList: QueueList, + Queue: Queue, +}; diff --git a/lib/xml/queue/QueueMessageList.js b/lib/xml/queue/QueueMessageList.js index 188fdfdc4..d0a45b446 100644 --- a/lib/xml/queue/QueueMessageList.js +++ b/lib/xml/queue/QueueMessageList.js @@ -1,41 +1,80 @@ -'use strict'; +/** @format */ -const js2xml = require('js2xmlparser'); +"use strict"; + +const js2xml = require("js2xmlparser"); class QueueMessagesListXmlModel { - constructor() { - this.QueueMessage = []; - } + constructor() { + this.QueueMessage = []; + } - add(msg) { - this.QueueMessage.push(msg); - } + add(msg) { + this.QueueMessage.push(msg); + } - toXml() { - const xml = 
js2xml.parse('QueueMessagesList', this); - return xml.replace(/\>[\s]+\<'); - } + toXml() { + const xml = js2xml.parse("QueueMessagesList", this); + return xml.replace(/\>[\s]+\<"); + } } class QueueMessageXmlModel { - constructor({ messageId = undefined, - insertionTime = undefined, - expirationTime = undefined, - popReceipt = undefined, - timeNextVisible = undefined, - dequeueCount = undefined, - messageText = undefined }) { - this.MessageId = messageId; this.MessageId === undefined ? delete this.MessageId : (() => {/*NOOP*/ }); - this.InsertionTime = insertionTime; this.InsertionTime === undefined ? delete this.InsertionTime : (() => {/*NOOP*/ }); - this.ExpirationTime = expirationTime; this.ExpirationTime === undefined ? delete this.ExpirationTime : (() => {/*NOOP*/ }); - this.PopReceipt = popReceipt; this.PopReceipt === undefined ? delete this.PopReceipt : (() => {/*NOOP*/ }); - this.TimeNextVisible = timeNextVisible; this.TimeNextVisible === undefined ? delete this.TimeNextVisible : (() => {/*NOOP*/ }); - this.DequeueCount = dequeueCount; this.DequeueCount === undefined ? delete this.DequeueCount : (() => {/*NOOP*/ }); - this.MessageText = messageText; this.MessageText === undefined ? delete this.MessageText : (() => {/*NOOP*/ }); - } + constructor({ + messageId = undefined, + insertionTime = undefined, + expirationTime = undefined, + popReceipt = undefined, + timeNextVisible = undefined, + dequeueCount = undefined, + messageText = undefined, + }) { + this.MessageId = messageId; + this.MessageId === undefined + ? delete this.MessageId + : () => { + /*NOOP*/ + }; + this.InsertionTime = insertionTime; + this.InsertionTime === undefined + ? delete this.InsertionTime + : () => { + /*NOOP*/ + }; + this.ExpirationTime = expirationTime; + this.ExpirationTime === undefined + ? delete this.ExpirationTime + : () => { + /*NOOP*/ + }; + this.PopReceipt = popReceipt; + this.PopReceipt === undefined + ? 
delete this.PopReceipt + : () => { + /*NOOP*/ + }; + this.TimeNextVisible = timeNextVisible; + this.TimeNextVisible === undefined + ? delete this.TimeNextVisible + : () => { + /*NOOP*/ + }; + this.DequeueCount = dequeueCount; + this.DequeueCount === undefined + ? delete this.DequeueCount + : () => { + /*NOOP*/ + }; + this.MessageText = messageText; + this.MessageText === undefined + ? delete this.MessageText + : () => { + /*NOOP*/ + }; + } } module.exports = { - QueueMessageListXmlModel: QueueMessagesListXmlModel, - QueueMessageXmlModel: QueueMessageXmlModel -} \ No newline at end of file + QueueMessageListXmlModel: QueueMessagesListXmlModel, + QueueMessageXmlModel: QueueMessageXmlModel, +}; diff --git a/lib/xml/queue/QueueMessageText.js b/lib/xml/queue/QueueMessageText.js index c29f7edd3..1d3e0c16e 100644 --- a/lib/xml/queue/QueueMessageText.js +++ b/lib/xml/queue/QueueMessageText.js @@ -1,35 +1,36 @@ -'use strict'; +/** @format */ -const BbPromise = require('bluebird'), - AError = require('./../../core/AzuriteError'), - ErrorCode = require('./../../core/ErrorCodes'), - xml2jsAsync = BbPromise.promisify(require('xml2js').parseString), - js2xml = require('js2xmlparser'); +"use strict"; + +const BbPromise = require("bluebird"), + AError = require("./../../core/AzuriteError"), + ErrorCode = require("./../../core/ErrorCodes"), + xml2jsAsync = BbPromise.promisify(require("xml2js").parseString), + js2xml = require("js2xmlparser"); class QueueMessageText { - constructor(msg = undefined) { - this.MessageText = msg; - } + constructor(msg = undefined) { + this.MessageText = msg; + } - static toJs(body) { - const xml = body.toString('utf8'); - if (xml.length === 0) { - return BbPromise.resolve(new QueueMessageText(undefined)); - - } - return xml2jsAsync(xml) - .then((result) => { - return new QueueMessageText(result.QueueMessage.MessageText[0]); - }) - .catch((err) => { - throw new AError(ErrorCode.InvalidXml); - }); + static toJs(body) { + const xml = body.toString("utf8"); 
+ if (xml.length === 0) { + return BbPromise.resolve(new QueueMessageText(undefined)); } + return xml2jsAsync(xml) + .then((result) => { + return new QueueMessageText(result.QueueMessage.MessageText[0]); + }) + .catch((err) => { + throw new AError(ErrorCode.InvalidXml); + }); + } - toXml() { - const xml = js2xml.parse('QueueMessage', this); - return xml.replace(/\>[\s]+\<'); - } + toXml() { + const xml = js2xml.parse("QueueMessage", this); + return xml.replace(/\>[\s]+\<"); + } } -module.exports = QueueMessageText; \ No newline at end of file +module.exports = QueueMessageText; diff --git a/package-lock.json b/package-lock.json index 3a5c86e2c..3ea78c88e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -114,6 +114,86 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.6.0.tgz", "integrity": "sha1-g+9cqGCysy5KDe7e6MdxudtXRx4=" }, + "azure-storage": { + "version": "2.10.1", + "resolved": "https://registry.npmjs.org/azure-storage/-/azure-storage-2.10.1.tgz", + "integrity": "sha512-rnFo1uMIPtilusRCpK91tfY3P4Q7qRsDNwriXdp+OeTIGkGt0cTxL4mhqYfNPYPK+WBQmBdGWhOk+iROM05dcw==", + "dev": true, + "requires": { + "browserify-mime": "~1.2.9", + "extend": "~1.2.1", + "json-edm-parser": "0.1.2", + "md5.js": "1.3.4", + "readable-stream": "~2.0.0", + "request": "^2.86.0", + "underscore": "~1.8.3", + "uuid": "^3.0.0", + "validator": "~9.4.1", + "xml2js": "0.2.8", + "xmlbuilder": "0.4.3" + }, + "dependencies": { + "extend": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/extend/-/extend-1.2.1.tgz", + "integrity": "sha1-oPX9bPyDpf5J72mNYOyKYk3UV2w=", + "dev": true + }, + "process-nextick-args": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M=", + "dev": true + }, + "readable-stream": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "integrity": 
"sha1-j5A0HmilPMySh4jaz80Rs265t44=", + "dev": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "~1.0.0", + "process-nextick-args": "~1.0.6", + "string_decoder": "~0.10.x", + "util-deprecate": "~1.0.1" + } + }, + "sax": { + "version": "0.5.8", + "resolved": "https://registry.npmjs.org/sax/-/sax-0.5.8.tgz", + "integrity": "sha1-1HLbIo6zMcJQaw6MFVJK25OdEsE=", + "dev": true + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=", + "dev": true + }, + "validator": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/validator/-/validator-9.4.1.tgz", + "integrity": "sha512-YV5KjzvRmSyJ1ee/Dm5UED0G+1L4GZnLN3w6/T+zZm8scVua4sOhYKWTUrKa0H/tMiJyO9QLHMPN+9mB/aMunA==", + "dev": true + }, + "xml2js": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.2.8.tgz", + "integrity": "sha1-m4FpCTFjH/CdGVdUn69U9PmAs8I=", + "dev": true, + "requires": { + "sax": "0.5.x" + } + }, + "xmlbuilder": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-0.4.3.tgz", + "integrity": "sha1-xGFLp04K0ZbmCcknLNnh3bKKilg=", + "dev": true + } + } + }, "babel-code-frame": { "version": "6.26.0", "resolved": "https://registry.npmjs.org/babel-code-frame/-/babel-code-frame-6.26.0.tgz", @@ -1195,6 +1275,12 @@ "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", "dev": true }, + "browserify-mime": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/browserify-mime/-/browserify-mime-1.2.9.tgz", + "integrity": "sha1-rrGvKN5sDXpqLOQK22j/GEIq8x8=", + "dev": true + }, "byline": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/byline/-/byline-5.0.0.tgz", @@ -1365,7 +1451,7 @@ }, "concat-map": { "version": "0.0.1", - "resolved": 
"https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "resolved": false, "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, @@ -1412,87 +1498,6 @@ "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, - "cross-env": { - "version": "3.2.4", - "resolved": "https://registry.npmjs.org/cross-env/-/cross-env-3.2.4.tgz", - "integrity": "sha1-ngWF8neGTtQhznVvgamA/w1piro=", - "dev": true, - "requires": { - "cross-spawn": "^5.1.0", - "is-windows": "^1.0.0" - }, - "dependencies": { - "cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha1-6L0O/uWPz/b4+UUQoKVUu/ojVEk=", - "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "is-windows": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.1.tgz", - "integrity": "sha1-MQ23D3QtJZoWo2kgK1GvhCMzENk=", - "dev": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", - "dev": true - }, - "lru-cache": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.1.tgz", - "integrity": "sha512-q4spe4KTfsAS1SUHLO0wz8Qiyf1+vMIAgpRYioFYDMNqKfHQbg+AVDH3i4fvpl71/P1L0dBl+fQi+P37UYf0ew==", - "dev": true, - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", - "dev": true - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - 
"shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", - "dev": true - }, - "which": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.0.tgz", - "integrity": "sha512-xcJpopdamTuY5duC/KnTTNBraPK54YwpenP4lzxU8H91GudWpFv38u0CKjclE1Wi2EH2EDz5LRcHcKbCIzqGyg==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=", - "dev": true - } - } - }, "cross-spawn": { "version": "5.1.0", "resolved": false, @@ -1598,6 +1603,36 @@ "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.1.tgz", "integrity": "sha1-eePVhlU0aQn+bw9Fpd5oEDspTSA=" }, + "env-cmd": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/env-cmd/-/env-cmd-8.0.2.tgz", + "integrity": "sha512-gHX8MnQXw1iS7dc2KeJdBdxca7spIkxkNwIuORLwm8kDg6xHh5wWnv1Yv3pc64nLZR6kufQSCmwTz16sRmd/rg==", + "dev": true, + "requires": { + "cross-spawn": "^6.0.5" + }, + "dependencies": { + "cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "requires": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + } + }, + "semver": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.5.0.tgz", + "integrity": "sha512-4SJ3dm0WAwWy/NVeioZh5AntkdJoWKxHxcmyP622fOkgHa4z3R0TdBJICINyaSDE6uNwVc8gZr+ZinwZAH4xIA==", + "dev": true + } + } + }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -1854,7 +1889,7 @@ }, "fs.realpath": { "version": "1.0.0", - 
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "resolved": false, "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", "dev": true }, @@ -1949,6 +1984,16 @@ "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", "dev": true }, + "hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha1-X8hoaEfs1zSZQDMZprCj8/auSRg=", + "dev": true, + "requires": { + "inherits": "^2.0.1", + "safe-buffer": "^5.0.1" + } + }, "hawk": { "version": "3.1.3", "resolved": "https://registry.npmjs.org/hawk/-/hawk-3.1.3.tgz", @@ -2034,7 +2079,7 @@ }, "inherits": { "version": "2.0.3", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "resolved": false, "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=", "dev": true }, @@ -2131,6 +2176,15 @@ "integrity": "sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0=", "dev": true }, + "json-edm-parser": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/json-edm-parser/-/json-edm-parser-0.1.2.tgz", + "integrity": "sha1-HmCw/vG8CvZ7wNFG393lSGzWFbQ=", + "dev": true, + "requires": { + "jsonparse": "~1.2.0" + } + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -2175,6 +2229,12 @@ "integrity": "sha1-LHS27kHZPKUbe1qu6PUDYx0lKnM=", "dev": true }, + "jsonparse": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.2.0.tgz", + "integrity": "sha1-XAxWhRBxYOcv50ib3eoLRMK8Z70=", + "dev": true + }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -2226,6 +2286,16 @@ "yallist": "^2.1.2" } }, + "md5.js": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.4.tgz", + "integrity": "sha1-6b296UogpawYsENA/Fdk1bCdkB0=", + "dev": true, + "requires": { + "hash-base": "^3.0.0", + "inherits": "^2.0.1" + } + }, "media-typer": { "version": "0.3.0", "resolved": 
"https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -2263,7 +2333,7 @@ }, "minimatch": { "version": "3.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.3.tgz", + "resolved": false, "integrity": "sha1-Kk5AkLlrLbBqnX3wEFWmKnfJt3Q=", "dev": true, "requires": { @@ -2400,6 +2470,12 @@ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz", "integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk=" }, + "nice-try": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.4.tgz", + "integrity": "sha512-2NpiFHqC87y/zFke0fC0spBXL3bBsoh/p5H1EFhshxjCR5+0g2d6BiXbUFz9v1sAcxsk2htp2eQnNIci2dIYcA==", + "dev": true + }, "number-is-nan": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", @@ -2432,7 +2508,7 @@ }, "once": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "resolved": false, "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", "dev": true, "requires": { @@ -2476,6 +2552,12 @@ "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", "dev": true }, + "path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true + }, "path-parse": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.5.tgz", @@ -2698,6 +2780,12 @@ "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", "dev": true }, + "prettier": { + "version": "1.14.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.14.2.tgz", + "integrity": "sha512-McHPg0n1pIke+A/4VcaS2en+pTNjy4xF+Uuq86u/5dyDO59/TtFZtQ708QIRkEZ3qwKz3GVkVa6mpxK/CpB8Rg==", + "dev": true + }, "private": { "version": "0.1.8", "resolved": "https://registry.npmjs.org/private/-/private-0.1.8.tgz", @@ -2727,7 +2815,7 @@ }, "pseudomap": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + 
"resolved": false, "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=", "dev": true }, @@ -3204,6 +3292,12 @@ "integrity": "sha1-SDEm4Rd03y9xuLY53NeZw3YWK4I=", "dev": true }, + "underscore": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz", + "integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI=", + "dev": true + }, "unique-temp-dir": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/unique-temp-dir/-/unique-temp-dir-1.0.0.tgz", @@ -3278,7 +3372,7 @@ }, "wrappy": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "resolved": false, "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, diff --git a/package.json b/package.json old mode 100644 new mode 100755 index 79fb7a4c9..fd774fe0f --- a/package.json +++ b/package.json @@ -4,7 +4,8 @@ "description": "A lightweight server clone of Azure Blob, Queue, and Table Storage that simulates most of the commands supported by it with minimal dependencies.", "scripts": { "pretest": "npm run clean", - "test": "cross-env AZURITE_LOCATION=azurite-testdrive mocha --timeout 10000", + "test": "env-cmd ./test/.env mocha", + "externaltests": "env-cmd ./externaltest/.env mocha", "start": "node bin/azurite -l azurite-testdrive", "blob": "node bin/blob -l azurite-testdrive", "queue": "node bin/queue", @@ -61,12 +62,14 @@ "xml2js": "^0.4.17" }, "devDependencies": { + "azure-storage": "^2.10.1", "chai": "^3.5.0", "chai-http": "^4.0.0", - "cross-env": "^3.1.3", "cross-var": "^1.1.0", + "env-cmd": "^8.0.2", "mocha": "5.2.0", "pkg": "^4.3.1", + "prettier": "1.14.2", "request-promise": "^4.1.1", "rimraf": "2.6.2" } diff --git a/release-notes.md b/release-notes.md index 6178ce7c1..eb29fb88e 100644 --- a/release-notes.md +++ b/release-notes.md @@ -1,9 +1,18 @@ -# 2.0 +# 2.0 ## 2.6.6 +- updated testing using azure-storage-node tests in submodule +- application of jshint and prettier rule sets +- multiple fixes for issues in table storage 
+- fixes [#47](https://github.com/Azure/Azurite/issues/47) - Copied blob has incorrect content type +- merged PR [#32](https://github.com/Azure/Azurite/pull/32) -> thanks to @trekawek +- merged PR [#30](https://github.com/Azure/Azurite/pull/30) -> thanks to @julienr +- merged PR [#46](https://github.com/Azure/Azurite/pull/46) -> thanks to @Fanarito +- merged PR [#47](https://github.com/Azure/Azurite/pull/47) -> thanks to @kalleep - fixes [#12](https://github.com/Azure/Azurite/issues/12) - merged PR [#28](https://github.com/Azure/Azurite/pull/28) fixes [#26](https://github.com/Azure/Azurite/issues/26) -> thanks to @kalleep - merged PR [#25](BLOB shared key authentication) -> thanks to @vitaly-goot - merged PR [#15](https://github.com/Azure/Azurite/pull/15) fixes [#19](https://github.com/Azure/Azurite/issues/19) -> thanks to @david-driscoll + ## 2.6.5 (moved to github.com/azure/azurite) - merged PR [#4](https://github.com/Azure/Azurite/pull/4) -> thanks to @trekawek - merged PR [#2](https://github.com/Azure/Azurite/pull/2) -> thanks to @trekawek diff --git a/test/.env b/test/.env new file mode 100755 index 000000000..309ce9cbb --- /dev/null +++ b/test/.env @@ -0,0 +1,3 @@ +AZURITE_LOCATION=azurite-testdrive +NOCK_OFF=true +AZURE_STORAGE_CONNECTION_STRING=DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1; diff --git a/test/01_Basic_table_Tests.js b/test/01_Basic_table_Tests.js new file mode 100644 index 000000000..38d9fd8bb --- /dev/null +++ b/test/01_Basic_table_Tests.js @@ -0,0 +1,261 @@ +/** @format */ + +const chai = require("chai"), + chaiHttp = require("chai-http"), + should = chai.should(), + expect = chai.expect, + BbPromise = require("bluebird"), + fs = BbPromise.promisifyAll(require("fs-extra")), + Azurite = require("./../../Azurite/lib/AzuriteTable"), + rp = require("request-promise"), + path = 
require("path"), + xml2js = require("xml2js"), + azureStorage = require("azure-storage"); + +chai.use(chaiHttp); + +const tableName = "testtable"; +// after testing, we need to clean up the DB files etc that we create. +// I wanted to shorten the cycles while debugging so create a new path +// with each pass of the debugger +const tableTestPath = + new Date() + .toISOString() + .replace(/:/g, "") + .replace(/\./g, "") + "_TABLE_TESTS"; +const tableService = azureStorage.createTableService( + "UseDevelopmentStorage=true" +); +const entGen = azureStorage.TableUtilities.entityGenerator; +const partitionKeyForTest = "azurite"; +const rowKeyForTestEntity1 = "1"; +const rowKeyForTestEntity2 = "2"; +const EntityNotFoundErrorMessage = + 'EntityNotFoundThe specified entity does not exist.'; + +describe("Table HTTP Api tests", () => { + const azurite = new Azurite(); + const tableEntity1 = { + PartitionKey: entGen.String(partitionKeyForTest), + RowKey: entGen.String(rowKeyForTestEntity1), + description: entGen.String("foo"), + dueDate: entGen.DateTime(new Date(Date.UTC(2018, 12, 25))), + }; + + const tableEntity2 = { + PartitionKey: entGen.String(partitionKeyForTest), + RowKey: entGen.String(rowKeyForTestEntity2), + description: entGen.String("bar"), + dueDate: entGen.DateTime(new Date(Date.UTC(2018, 12, 26))), + }; + + let entity1Created = false; + + // set us up the tests! 
+ const testDBLocation = path.join(process.env.AZURITE_LOCATION, tableTestPath); + + before(() => { + azurite + .init({ + l: testDBLocation, + silent: "true", + overwrite: "true", + }) + //.then(() => tableService.createTableIfNotExists(tableName, function (error, result, response) { + // would be better to use "createTableIfNotExists" but we may need to make changes server side for this to work + .then(() => + tableService.createTable(tableName, function(error, result, response) { + tableService.insertEntity(tableName, tableEntity1, function( + error, + result, + response + ) { + if (error === null) { + entity1Created = true; + tableService.insertEntity(tableName, tableEntity2, function( + error, + result, + response + ) { + if (error === null) { + } + }); + } else { + throw error; + } + }); + }) + ); + }); + + // JSON response described here (but we are using storage SDK) + // https://docs.microsoft.com/en-us/rest/api/storageservices/query-entities + /* + { "value":[ + { + "PartitionKey":"Customer", + "RowKey":"Name", + "Timestamp":"2013-08-22T00:20:16.3134645Z", + etc... + */ + // The value validation below works for both Azure Cloud Table Storage and Azurite's API + // if you make changes, please ensure that you test against both + describe("GET Table Entities", () => { + it("should retrieve Entity 1 by PartitionKey and RowKey", (done) => { + // there is some race condition sometimes, depending on the speed of the testing system + // currently this delay solves it, until I can fix the before statement to deal + // with a promise for DB creation, and wrap test entity creation in said promise + // even though the initialization of Azurite should be promisified already, this is prone + // to error. 
+ if (entity1Created === false) { + const getE1 = setTimeout(() => { + singleEntityTest(done); + }, 500); + } else { + singleEntityTest(done); + } + }); + + function singleEntityTest(cb) { + // I create a new tableService, as the oringal above was erroring out + // with a socket close if I reuse it + const retrievalTableService = azureStorage.createTableService( + "UseDevelopmentStorage=true" + ); + retrievalTableService.retrieveEntity( + tableName, + partitionKeyForTest, + rowKeyForTestEntity1, + function(error, result, response) { + expect(error).to.equal(null); + expect(result).to.not.equal(undefined); + expect(result).to.not.equal(null); + expect(result.PartitionKey._).to.equal(partitionKeyForTest); + expect(result.RowKey._).to.equal(rowKeyForTestEntity1); + expect(result.description._).to.equal(tableEntity1.description._); + expect(result.dueDate._.toISOString().split(".")[0] + "Z").to.equal( + new Date(Date.UTC(2018, 12, 25)).toISOString().split(".")[0] + "Z" + ); + cb(); + } + ); + } + + it("should retrieve all Entities", (done) => { + const query = new azureStorage.TableQuery(); + const retrievalTableService = azureStorage.createTableService( + "UseDevelopmentStorage=true" + ); + retrievalTableService.queryEntities(tableName, query, null, function( + error, + results, + response + ) { + expect(error).to.equal(null); + expect(results.entries.length).to.equal(2); + const sortedResults = results.entries.sort(); + expect(sortedResults[0].description._).to.equal( + tableEntity1.description._ + ); + expect(sortedResults[1].description._).to.equal( + tableEntity2.description._ + ); + expect(sortedResults[0].RowKey._).to.equal(rowKeyForTestEntity1); + expect( + sortedResults[0].dueDate._.toISOString().split(".")[0] + "Z" + ).to.equal( + new Date(Date.UTC(2018, 12, 25)).toISOString().split(".")[0] + "Z" + ); + done(); + }); + }); + + it("should fail to retrieve a non-existing row with 404 EntityNotFound", (done) => { + if (entity1Created === false) { + const getE1 = 
setTimeout(() => { + missingEntityTest(done); + }, 500); + } else { + missingEntityTest(done); + } + }); + + function missingEntityTest(cb) { + const faillingLookupTableService = azureStorage.createTableService( + "UseDevelopmentStorage=true" + ); + faillingLookupTableService.retrieveEntity( + tableName, + partitionKeyForTest, + "unknownRowKey", + function(error, result, response) { + expect(error.message).to.equal(EntityNotFoundErrorMessage); + expect(response.statusCode).to.equal(404); + cb(); + } + ); + } + + // this test performs a query, rather than a retrieve (which is just a different implementation via + // the SDK, but currently lands in the same place in our implementation which is using LokiJs) + it("should fail to find a non-existing entity with 404 EntityNotFound", (done) => { + if (entity1Created === false) { + const getE1 = setTimeout(() => { + missingEntityFindTest(done); + }, 500); + } else { + missingEntityFindTest(done); + } + }); + + function missingEntityFindTest(cb) { + const query = new azureStorage.TableQuery() + .top(5) + .where("RowKey eq ?", "unknownRowKeyForFindError"); + const faillingFindTableService = azureStorage.createTableService( + "UseDevelopmentStorage=true" + ); + faillingFindTableService.queryEntities(tableName, query, null, function( + error, + result, + response + ) { + expect(error.message).to.equal(EntityNotFoundErrorMessage); + expect(response.statusCode).to.equal(404); + cb(); + }); + } + }); + + describe("PUT and Insert Table Entites", () => { + it("should return a valid object in the result object when creating an Entity in TableStorage using return no content", (done) => { + const insertEntityTableService = azureStorage.createTableService( + "UseDevelopmentStorage=true" + ); + const insertionEntity = { + PartitionKey: entGen.String(partitionKeyForTest), + RowKey: entGen.String("3"), + description: entGen.String("qux"), + dueDate: entGen.DateTime(new Date(Date.UTC(2018, 12, 26))), + }; + + // Request is made by default 
with "return-no-content" when using the storage-sdk + insertEntityTableService.insertEntity( + tableName, + insertionEntity, + { + echoContent: false, + }, + function(error, result, response) { + // etag format is currently different to that returned from Azure and x-ms-version 2018-03-28 + expect(response.statusCode).to.equal(204); + expect(result).to.not.equal(undefined); + expect(result[".metadata"].etag).to.not.equal(undefined); + done(); + } + ); + }); + }); + + after(() => azurite.close()); +}); diff --git a/test/02_Basic_blob_Tests.js b/test/02_Basic_blob_Tests.js new file mode 100644 index 000000000..909adca23 --- /dev/null +++ b/test/02_Basic_blob_Tests.js @@ -0,0 +1,610 @@ +/** @format */ + +const chai = require("chai"), + chaiHttp = require("chai-http"), + should = chai.should(), + expect = chai.expect, + BbPromise = require("bluebird"), + fs = BbPromise.promisifyAll(require("fs-extra")), + Azurite = require("../lib/AzuriteBlob"), + rp = require("request-promise"), + path = require("path"), + xml2js = require("xml2js"); + +chai.use(chaiHttp); + +const containerName = "testcontainer"; +const blockBlobName = "testblockblob"; +const blockBlobCopiedName = "testblockblob_copied"; +const appendBlobName = "testappendblob"; +const pageBlobName = "testpageblob"; +const url = `http://localhost:10000`; +const urlPath = `/devstoreaccount1`; +const testPath = + new Date() + .toISOString() + .replace(/:/g, "") + .replace(/\./g, "") + "_BLOB_TESTS"; + +function createBlob(containerNamex, blobNamex, payload, blobType) { + // Make sure there is an existing container 'testcontainer' + const optionsContainer = { + method: "PUT", + uri: `http://localhost:10000/devstoreaccount1/${containerNamex}?restype=container`, + body: "", + }; + const optionsBlob = { + method: "PUT", + headers: { + "x-ms-blob-type": blobType, + "Content-Type": "application/octet-stream", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerNamex}/${blobNamex}`, + body: payload, + }; + + 
return rp(optionsContainer).then(() => { + return rp(optionsBlob); + }); +} + +describe("Blob HTTP API", () => { + const azurite = new Azurite(); + + before(() => { + const location = path.join(process.env.AZURITE_LOCATION, testPath); + return azurite + .init({ l: location, silent: "true", overwrite: "true" }) + .then(() => { + // Make sure there is an existing container 'testcontainer' + const optionsContainer = { + method: "PUT", + uri: `http://localhost:10000/devstoreaccount1/${containerName}?restype=container`, + body: "", + }; + const optionsBlockBlob = { + method: "PUT", + headers: { + "x-ms-blob-type": "BlockBlob", + "Content-Type": "application/octet-stream", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobName}`, + body: "abc123", + }; + const optionsAppendBlob = { + method: "PUT", + headers: { + "x-ms-blob-type": "AppendBlob", + "Content-Type": "application/octet-stream", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${appendBlobName}`, + body: "", + }; + const optionsPageBlob = { + method: "PUT", + headers: { + "x-ms-blob-type": "PageBlob", + "Content-Type": "application/octet-stream", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${pageBlobName}`, + body: "", + }; + return rp(optionsContainer) + .then(() => { + return rp(optionsBlockBlob); + }) + .then(() => { + return rp(optionsAppendBlob); + }) + .then(() => { + return rp(optionsPageBlob); + }); + }); + }); + + after(() => { + return azurite.close(); + }); + + describe("PUT Block Blob", () => { + it("should fail to create a block due to missing container", () => { + return chai + .request(url) + .put(`${urlPath}/DOESNOTEXISTS/blob`) + .set("x-ms-blob-type", "BlockBlob") + .set("Content-Type", "application/octet-stream") + .send("THIS IS CONTENT") + .catch((e) => { + e.should.have.status(404); + }); + }); + it("should fail to create a block due to wrong or unsupported blob type", () => { + return chai + .request(url) + 
.put(`${urlPath}/${containerName}/blob`) + .set("x-ms-blob-type", "NOTSUPPORTED") + .set("Content-Type", "application/octet-stream") + .send("THIS IS CONTENT") + .catch((e) => { + e.should.have.status(400); + }); + }); + it("should create a simple block blob without meta headers", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/blob`) + .set("x-ms-blob-type", "BlockBlob") + .set("Content-Type", "application/octet-stream") + .send("abcdefghijklmn") + .then((res) => { + res.should.have.status(201); + }); + }); + }); + + describe("Put BlockList", () => { + const putBlockListBlobName = "dir/putBlockListBlobName"; + it("should create a block blob from a list of blocks", () => { + const optionsBlockBlob = { + method: "PUT", + headers: { + "Content-Type": "application/octet-stream", + "Content-Length": 6, + }, + qs: { + comp: "block", + blockid: "AAAAAA==", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${putBlockListBlobName}`, + body: "AAAAAA", + }; + + return rp(optionsBlockBlob) + .then(() => { + optionsBlockBlob.body = "BBBBBB"; + optionsBlockBlob.qs.blockid = "BBBBBB=="; + return rp(optionsBlockBlob); + }) + .then(() => { + optionsBlockBlob.body = "CCCCCC"; + optionsBlockBlob.qs.blockid = "CCCCCC=="; + return rp(optionsBlockBlob); + }) + .then(() => { + optionsBlockBlob.body = "DDDDDD"; + optionsBlockBlob.qs.blockid = "DDDDDD=="; + return rp(optionsBlockBlob); + }) + .then(() => { + const xmlBody = ` + + AAAAAA== + CCCCCC== + AAAAAA== + `; + return chai + .request(url) + .put(`${urlPath}/${containerName}/${putBlockListBlobName}`) + .query({ comp: "blocklist" }) + .send(xmlBody) + .then((res) => { + res.should.have.status(201); + }); + }); + }); + }); + + describe("Delete Blob", () => { + it("should delete an existing Block Blob", () => { + return createBlob("deleteblobtest", "blob", "abc123", "BlockBlob") + .then(() => { + return chai.request(url).delete(`${urlPath}/deleteblobtest/blob`); + }) + .then((res) => { + 
res.should.have.status(202); + }); + }); + it("should fail when deleting a non-existant blob", () => { + return chai + .request(url) + .delete(`${urlPath}/deleteblobtest/DOESNOTEXIST`) + .catch((e) => { + e.should.have.status(404); + }); + }); + it("should fail when deleting from a non-existant container", () => { + return chai + .request(url) + .delete(`${urlPath}/DOESNOTEXIST/DOESNOTEXIST`) + .catch((e) => { + e.should.have.status(404); + }); + }); + }); + + describe("Append Blobs", () => { + it("should create an append blob", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/appendBlob`) + .set("x-ms-blob-type", "AppendBlob") + .set("Content-Type", "application/octet-stream") + .then((res) => { + res.should.have.status(201); + }); + }); + it("should append data to the append blob", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/appendBlob`) + .query({ comp: "appendblock" }) + .set("x-ms-blob-type", "AppendBlob") + .set("Content-Type", "application/octet-stream") + .send("abcdefghi") + .then((res) => { + res.should.have.status(201); + }); + }); + it("should fail to create an append blob with size > 0", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/appendBlob`) + .set("x-ms-blob-type", "AppendBlob") + .set("Content-Type", "application/octet-stream") + .send("abcdefg") + .catch((e) => { + e.should.have.status(409); + }); + }); + }); + + describe("Page Blobs", () => { + it("should get an empty page list from the page blob", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .then((res) => { + res.should.have.status(200); + xml2js.Parser().parseString(res.text, function(err, result) { + expect(result.PageList).to.not.have.any.keys("PageRange"); + }); + }); + }); + it("should write data to the page blob range [0-511]", () => { + const bodydata = Buffer.alloc(512); + return chai + .request(url) + 
.put(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "page" }) + .set("x-ms-page-write", "update") + .set("x-ms-range", "bytes=0-511") + .set("Content-Type", "application/octet-stream") + .send(bodydata) + .then((res) => { + res.should.have.status(201); + }); + }); + it("should fail to write data to the page blob with an invalid range", () => { + const bodydata = Buffer.alloc(513); + return chai + .request(url) + .put(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "page" }) + .set("x-ms-page-write", "update") + .set("x-ms-range", "bytes=0-512") + .set("Content-Type", "application/octet-stream") + .send(bodydata) + .catch((e) => { + e.should.have.status(416); + }); + }); + it("should fail to write data to the page blob with an invalid body length", () => { + const bodydata = Buffer.alloc(513); + return chai + .request(url) + .put(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "page" }) + .set("x-ms-page-write", "update") + .set("x-ms-range", "bytes=0-511") + .set("Content-Type", "application/octet-stream") + .send(bodydata) + .catch((e) => { + e.should.have.status(400); + }); + }); + it("should get the page range [0-511] from the page blob", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .then((res) => { + res.should.have.status(200); + xml2js.Parser().parseString(res.text, function(err, result) { + expect(result.PageList.PageRange.length).to.equal(1); + expect(result.PageList.PageRange[0]).to.deep.equal({ + Start: ["0"], + End: ["511"], + }); + }); + }); + }); + it("should get the page range [0-511] from the page blob within range [0-1023]", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .set("x-ms-range", "bytes=0-1023") + .then((res) => { + res.should.have.status(200); + xml2js.Parser().parseString(res.text, function(err, result) { + 
expect(result.PageList.PageRange.length).to.equal(1); + expect(result.PageList.PageRange[0]).to.deep.equal({ + Start: ["0"], + End: ["511"], + }); + }); + }); + }); + it("should fail to get the page list from the page blob within an invalid range", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .set("x-ms-range", "bytes=0-1095") + .catch((e) => { + e.should.have.status(416); + }); + }); + it("should write data to the page blob range [1024-1535]", () => { + const bodydata = Buffer.alloc(512); + return chai + .request(url) + .put(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "page" }) + .set("x-ms-page-write", "update") + .set("x-ms-range", "bytes=1024-1535") + .set("Content-Type", "application/octet-stream") + .send(bodydata) + .then((res) => { + res.should.have.status(201); + }); + }); + it("should get the page ranges [0-511],[1024-1535] from the page blob", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .then((res) => { + res.should.have.status(200); + xml2js.Parser().parseString(res.text, function(err, result) { + expect(result.PageList.PageRange.length).to.equal(2); + expect(result.PageList.PageRange[0]).to.deep.equal({ + Start: ["0"], + End: ["511"], + }); + expect(result.PageList.PageRange[1]).to.deep.equal({ + Start: ["1024"], + End: ["1535"], + }); + }); + }); + }); + it("should write data to the page blob range [512-1023]", () => { + const bodydata = Buffer.alloc(512); + return chai + .request(url) + .put(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "page" }) + .set("x-ms-page-write", "update") + .set("x-ms-range", "bytes=512-1023") + .set("Content-Type", "application/octet-stream") + .send(bodydata) + .then((res) => { + res.should.have.status(201); + }); + }); + it("should get the page range [0-1535] from the page blob", () => { + return chai + .request(url) + 
.get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .then((res) => { + res.should.have.status(200); + xml2js.Parser().parseString(res.text, function(err, result) { + expect(result.PageList.PageRange.length).to.equal(1); + expect(result.PageList.PageRange[0]).to.deep.equal({ + Start: ["0"], + End: ["1535"], + }); + }); + }); + }); + it("should clear data in the page blob range [512-1023]", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "page" }) + .set("x-ms-page-write", "clear") + .set("x-ms-range", "bytes=512-1023") + .then((res) => { + res.should.have.status(201); + }); + }); + it("should get the page ranges [0-511],[1024-1535] from the page blob", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${pageBlobName}`) + .query({ comp: "pagelist" }) + .then((res) => { + res.should.have.status(200); + xml2js.Parser().parseString(res.text, function(err, result) { + expect(result.PageList.PageRange.length).to.equal(2); + expect(result.PageList.PageRange[0]).to.deep.equal({ + Start: ["0"], + End: ["511"], + }); + expect(result.PageList.PageRange[1]).to.deep.equal({ + Start: ["1024"], + End: ["1535"], + }); + }); + }); + }); + }); + + describe("GET Blob", () => { + it("should get the correct content of the Block Blob", () => { + const optionsBlockBlobGet = { + method: "GET", + headers: { + "Content-Type": "application/octet-stream", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobName}`, + }; + return rp(optionsBlockBlobGet).then((res) => { + expect(res).to.be.equal("abc123"); + }); + }); + it("should get the correct type of the append blob", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${appendBlobName}`) + .then((res) => { + res.should.have.status(200); + res.should.have.header("x-ms-blob-type", "AppendBlob"); + }); + }); + }); + + describe("Blob Metadata", () => { + it("should update an 
existing blob with metadata.", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/${blockBlobName}`) + .query({ comp: "metadata" }) + .set("x-ms-meta-test1", "value1") + .set("x-ms-meta-test2", "value2") + .set("x-ms-meta-meta1", "meta1Value") + .then((res) => { + res.should.have.status(200); + }); + }); + it("should get the correct metadata", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/${blockBlobName}`) + .query({ comp: "metadata" }) + .then((res) => { + res.should.have.status(200); + res.should.have.header("x-ms-meta-test1", "value1"); + res.should.have.header("x-ms-meta-test2", "value2"); + res.should.have.header("x-ms-meta-meta1", "meta1Value"); + res.should.have.header("Last-Modified"); + res.should.have.header("ETag"); + }); + }); + it("should fail to get metadata of a non-existant blob", () => { + return chai + .request(url) + .get(`${urlPath}/${containerName}/BLOB_DOESNOTEXISTS`) + .query({ comp: "metadata" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + it("should fail to get metadata of a blob in a non-existant container", () => { + return chai + .request(url) + .get(`${urlPath}/CONTAINER_DOESNOTEXIST/BLOB_DOESNOTEXISTS`) + .query({ comp: "metadata" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + }); + + describe("Blob Properties", () => { + it("should successfully set all system properties", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}/${blockBlobName}`) + .set("x-ms-blob-cache-control", "true") + .set("x-ms-blob-content-type", "ContentType") + .set("x-ms-blob-content-md5", "ContentMD5") + .set("x-ms-blob-content-encoding", "ContentEncoding") + .set("x-ms-blob-content-language", "ContentLanguage") + .query({ comp: "properties" }) + .then((res) => { + res.should.have.status(200); + }); + }); + it("should get all previously set system properties", () => { + return chai + .request(url) + .head(`${urlPath}/${containerName}/${blockBlobName}`) + 
.then((res) => { + res.should.have.status(200); + res.should.have.header("ETag"); + res.should.have.header("Last-Modified"); + res.should.have.header("Content-Type", "ContentType"); + res.should.have.header("Content-Encoding", "ContentEncoding"); + res.should.have.header("Content-MD5", "ContentMD5"); + res.should.have.header("Content-Language", "ContentLanguage"); + res.should.have.header("Cache-Control", "true"); + res.should.have.header("x-ms-blob-type", "BlockBlob"); + }); + }); + }); + + describe("Copy Blob", () => { + it("should copy a blob with same properties with source blob", () => { + const optionsProperties = { + method: "PUT", + headers: { + "x-ms-blob-content-type": "Content-Type", + "x-ms-blob-content-encoding": "Content-Encoding", + "x-ms-blob-content-language": "Content-Language", + "x-ms-blob-cache-control": "true", + "x-ms-blob-content-md5": "Content-MD5", + "x-ms-blob-content-disposition": "Content-Disposition", + }, + qs: { + comp: "properties", + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobName}`, + }; + + const optionsCopyBlob = { + method: "PUT", + headers: { + "x-ms-copy-source": `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobName}`, + }, + uri: `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobCopiedName}`, + }; + + return rp(optionsProperties) + .then(() => { + return rp(optionsCopyBlob); + }) + .then(() => { + return chai + .request(url) + .head(`${urlPath}/${containerName}/${blockBlobCopiedName}`) + .then((res) => { + res.should.have.status(200); + res.should.have.header("Content-Type", "Content-Type"); + res.should.have.header("Content-Encoding", "Content-Encoding"); + res.should.have.header("Content-MD5", "Content-MD5"); + res.should.have.header("Content-Language", "Content-Language"); + res.should.have.header("Cache-Control", "true"); + res.should.have.header( + "Content-Disposition", + "Content-Disposition" + ); + res.should.have.header("x-ms-blob-type", 
"BlockBlob"); + }); + }); + }); + }); +}); diff --git a/test/03_Basic_container_Tests.js b/test/03_Basic_container_Tests.js new file mode 100644 index 000000000..3921f3070 --- /dev/null +++ b/test/03_Basic_container_Tests.js @@ -0,0 +1,206 @@ +/** @format */ + +const chai = require("chai"), + chaiHttp = require("chai-http"), + should = chai.should(), + BbPromise = require("bluebird"), + fs = BbPromise.promisifyAll(require("fs-extra")), + Azurite = require("../lib/AzuriteBlob"), + rp = require("request-promise"), + path = require("path"); + +chai.use(chaiHttp); + +const containerName = "containertestcontainer"; +const propContainer = "propTestcontainer"; +const url = "http://localhost:10000"; +const urlPath = "/devstoreaccount1"; +const testPath = + new Date() + .toISOString() + .replace(/:/g, "") + .replace(/\./g, "") + "_CONTAINER_TESTS"; + +describe("Container HTTP API", () => { + const azurite = new Azurite(); + + before(() => { + const location = path.join(".", process.env.AZURITE_LOCATION, testPath); + return azurite + .init({ l: location, silent: "true", overwrite: "true" }) + .then(() => { + // Make sure there is an existing container 'metadatatestcontainer' + const optionsContainer = { + method: "PUT", + uri: `http://localhost:10000/devstoreaccount1/${propContainer}?restype=container`, + body: "", + }; + return rp(optionsContainer); + }); + }); + + after(() => { + return azurite.close(); + }); + + describe("PUT Simple Container", () => { + it("should create a container", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}`) + .query({ restype: "container" }) + .then((res) => { + res.should.have.status(201); + }); + }); + it("and a second with the same name that fails", () => { + return chai + .request(url) + .put(`${urlPath}/${containerName}`) + .query({ restype: "container" }) + .catch((e) => { + e.should.have.status(409); + }); + }); + }); + describe("DELETE Simple Container", () => { + it("successfully deletes the container", () => 
{ + return chai + .request(url) + .delete(`${urlPath}/${containerName}`) + .query({ restype: "container" }) + .then((res) => { + res.should.have.status(202); + }); + }); + it("deleting a non-existant container fails", () => { + return chai + .request(url) + .delete(`${urlPath}/DOESNOTEXIST`) + .query({ restype: "container" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + }); + describe("Container Metadata", () => { + it("should update an existing container with metadata.", () => { + return chai + .request(url) + .put(`${urlPath}/${propContainer}`) + .query({ restype: "container", comp: "metadata" }) + .set("x-ms-meta-test1", "value1") + .set("x-ms-meta-test2", "value2") + .set("x-ms-meta-meta1", "meta1Value") + .then((res) => { + res.should.have.status(200); + }); + }); + it("should get the correct metadata. (GET)", () => { + return chai + .request(url) + .get(`${urlPath}/${propContainer}`) + .query({ restype: "container", comp: "metadata" }) + .then((res) => { + res.should.have.status(200); + res.should.have.header("x-ms-meta-test1", "value1"); + res.should.have.header("x-ms-meta-test2", "value2"); + res.should.have.header("x-ms-meta-meta1", "meta1Value"); + res.should.have.header("Last-Modified"); + res.should.have.header("ETag"); + }); + }); + it("should get the correct metadata. 
(HEAD)", () => { + return chai + .request(url) + .head(`${urlPath}/${propContainer}`) + .query({ restype: "container", comp: "metadata" }) + .then((res) => { + res.should.have.status(200); + res.should.have.header("x-ms-meta-test1", "value1"); + res.should.have.header("x-ms-meta-test2", "value2"); + res.should.have.header("x-ms-meta-meta1", "meta1Value"); + res.should.have.header("Last-Modified"); + res.should.have.header("ETag"); + }); + }); + it("should fail to get metadata of a non-existant container (GET)", () => { + return chai + .request(url) + .get(`${urlPath}/CONTAINER_DOESNOTEXIST`) + .query({ restype: "container", comp: "metadata" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + it("should fail to get metadata of a non-existant container (HEAD)", () => { + return chai + .request(url) + .head(`${urlPath}/CONTAINER_DOESNOTEXIST`) + .query({ restype: "container", comp: "metadata" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + }); + describe("Container System Properties", () => { + it("should update an existing container with metadata.", () => { + return chai + .request(url) + .put(`${urlPath}/${propContainer}`) + .query({ restype: "container", comp: "metadata" }) + .set("x-ms-meta-test1", "value1") + .set("x-ms-meta-test2", "value2") + .set("x-ms-meta-meta1", "meta1Value") + .then((res) => { + res.should.have.status(200); + }); + }); + it("should get the correct metadata. (GET)", () => { + return chai + .request(url) + .get(`${urlPath}/${propContainer}`) + .query({ restype: "container" }) + .then((res) => { + res.should.have.status(200); + res.should.have.header("x-ms-meta-test1", "value1"); + res.should.have.header("x-ms-meta-test2", "value2"); + res.should.have.header("x-ms-meta-meta1", "meta1Value"); + res.should.have.header("Last-Modified"); + res.should.have.header("ETag"); + }); + }); + it("should get the correct metadata. 
(HEAD)", () => { + return chai + .request(url) + .head(`${urlPath}/${propContainer}`) + .query({ restype: "container" }) + .then((res) => { + res.should.have.status(200); + res.should.have.header("x-ms-meta-test1", "value1"); + res.should.have.header("x-ms-meta-test2", "value2"); + res.should.have.header("x-ms-meta-meta1", "meta1Value"); + res.should.have.header("Last-Modified"); + res.should.have.header("ETag"); + }); + }); + it("should fail to get metadata of a non-existant container (GET)", () => { + return chai + .request(url) + .get(`${urlPath}/CONTAINER_DOESNOTEXIST`) + .query({ restype: "container" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + it("should fail to get metadata of a non-existant container (HEAD)", () => { + return chai + .request(url) + .head(`${urlPath}/CONTAINER_DOESNOTEXIST`) + .query({ restype: "container" }) + .catch((e) => { + e.should.have.status(404); + }); + }); + }); +}); diff --git a/test/04_SnapshotManager_Tests.js b/test/04_SnapshotManager_Tests.js new file mode 100644 index 000000000..e2743c42e --- /dev/null +++ b/test/04_SnapshotManager_Tests.js @@ -0,0 +1,19 @@ +/** @format */ + +"use strict"; + +const chai = require("chai"), + expect = chai.expect, + SnapshotManager = require("../lib/core/blob/SnapshotTimeManager"); + +describe("SnapshotTimeManager", () => { + it("should return a snapshot date that is at least one second greater than previous snapshot of same container-blob", () => { + const timeContext = new Date(), + d1 = SnapshotManager.getDate("id1", timeContext), + d2 = SnapshotManager.getDate("id1", timeContext); + var d1Seconds = d1.getSeconds(), + d2Seconds = d2.getSeconds(); + if (d2Seconds === 0) d2Seconds = 60; + expect(d2Seconds).to.be.greaterThan(d1Seconds); + }); +}); diff --git a/test/05_Basic_queue_Tests.js b/test/05_Basic_queue_Tests.js new file mode 100644 index 000000000..3338c163f --- /dev/null +++ b/test/05_Basic_queue_Tests.js @@ -0,0 +1,104 @@ +/** @format */ + +const QueueName = 
require("../lib/validation/queue/QueueName"), + AError = require("../lib/core/AzuriteError"); +ErrorCodes = require("../lib/core/ErrorCodes"); +expect = require("chai").expect; + +describe("validation", () => { + describe("QueueName", () => { + const createQueueNameRequest = (queueName) => { + return { request: { queueName } }; + }; + + it("should throw out of range if name is less than three characters", () => { + expect(() => QueueName.validate(createQueueNameRequest(""))).to.throw( + AError, + ErrorCodes.OutOfRangeInput + ); + expect(() => QueueName.validate(createQueueNameRequest("a"))).to.throw( + AError, + ErrorCodes.OutOfRangeInput + ); + expect(() => QueueName.validate(createQueueNameRequest("aa"))).to.throw( + AError, + ErrorCodes.OutOfRangeInput + ); + expect(() => + QueueName.validate(createQueueNameRequest("aaa")) + ).not.to.throw(); + }); + + it("should throw out of range if name is greater than sixty three characters", () => { + const sixtyThreeCharacterStringName = + "012345678901234567890123456789012345678901234567890123456789012"; + + expect(() => + QueueName.validate( + createQueueNameRequest(sixtyThreeCharacterStringName) + ) + ).not.to.throw(); + expect(() => + QueueName.validate( + createQueueNameRequest(sixtyThreeCharacterStringName + "3") + ) + ).to.throw(AError, ErrorCodes.OutOfRangeInput); + expect(() => + QueueName.validate( + createQueueNameRequest(sixtyThreeCharacterStringName + "34") + ) + ).to.throw(AError, ErrorCodes.OutOfRangeInput); + }); + + it("should throw invalid input if name starts with a dash", () => { + expect(() => + QueueName.validate(createQueueNameRequest("-queue")) + ).to.throw(AError, ErrorCodes.InvalidInput); + expect(() => + QueueName.validate(createQueueNameRequest("-queue-name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + }); + + it("should throw invalid input if name ends with a dash", () => { + expect(() => + QueueName.validate(createQueueNameRequest("queue-")) + ).to.throw(AError, ErrorCodes.InvalidInput); 
+ expect(() => + QueueName.validate(createQueueNameRequest("queue-name-")) + ).to.throw(AError, ErrorCodes.InvalidInput); + }); + + it("should throw invalid input if contians two consecutive dashes", () => { + expect(() => + QueueName.validate(createQueueNameRequest("queue--name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + }); + + it("should throw invalid input if contians anything except alphanumeric characters and dashes", () => { + expect(() => + QueueName.validate(createQueueNameRequest("queue-name")) + ).not.to.throw(); + expect(() => + QueueName.validate(createQueueNameRequest("queue1")) + ).not.to.throw(); + expect(() => + QueueName.validate(createQueueNameRequest("QUEUE-name-1")) + ).not.to.throw(); + expect(() => + QueueName.validate(createQueueNameRequest("queue_name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + expect(() => + QueueName.validate(createQueueNameRequest("queue name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + expect(() => + QueueName.validate(createQueueNameRequest("queue~name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + expect(() => + QueueName.validate(createQueueNameRequest("queue@name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + expect(() => + QueueName.validate(createQueueNameRequest("queue:name")) + ).to.throw(AError, ErrorCodes.InvalidInput); + }); + }); +}); diff --git a/test/SnapshotManager_Test.js b/test/SnapshotManager_Test.js deleted file mode 100644 index 9635a6103..000000000 --- a/test/SnapshotManager_Test.js +++ /dev/null @@ -1,17 +0,0 @@ -'use strict'; - -const chai = require('chai'), - expect = chai.expect, - SnapshotManager = require('./../lib/core/blob/SnapshotTimeManager'); - -describe('SnapshotTimeManager', () => { - it('should return a snapshot date that is at least one second greater than previous snapshot of same container-blob', () => { - const timeContext = new Date(), - d1 = SnapshotManager.getDate('id1', timeContext), - d2 = SnapshotManager.getDate('id1', timeContext); - var 
d1Seconds = d1.getSeconds(), - d2Seconds = d2.getSeconds(); - if (d2Seconds === 0) d2Seconds = 60; - expect(d2Seconds).to.be.greaterThan(d1Seconds); - }); -}); \ No newline at end of file diff --git a/test/blob.js b/test/blob.js deleted file mode 100644 index 85e64b5dc..000000000 --- a/test/blob.js +++ /dev/null @@ -1,505 +0,0 @@ -const chai = require('chai'), - chaiHttp = require('chai-http'), - should = chai.should(), - expect = chai.expect, - BbPromise = require('bluebird'), - fs = BbPromise.promisifyAll(require("fs-extra")), - Azurite = require('./../lib/AzuriteBlob'), - rp = require('request-promise'), - path = require('path'), - xml2js = require('xml2js'); - -chai.use(chaiHttp); - -const containerName = 'testcontainer'; -const blockBlobName = 'testblockblob'; -const appendBlobName = 'testappendblob'; -const pageBlobName = 'testpageblob'; -const url = `http://localhost:10000`; -const urlPath = `/devstoreaccount1`; - - -function createBlob(containerNamex, blobNamex, payload, blobType) { - // Make sure there is an existing container 'testcontainer' - const optionsContainer = { - method: 'PUT', - uri: `http://localhost:10000/devstoreaccount1/${containerNamex}?restype=container`, - body: '' - }; - const optionsBlob = { - method: 'PUT', - headers: { - 'x-ms-blob-type': blobType, - 'Content-Type': 'application/octet-stream' - }, - uri: `http://localhost:10000/devstoreaccount1/${containerNamex}/${blobNamex}`, - body: payload - } - - return rp(optionsContainer) - .then(() => { - return rp(optionsBlob); - }); -} - -describe('Blob HTTP API', () => { - const azurite = new Azurite(); - - before(() => { - const location = path.join(process.env.AZURITE_LOCATION, 'BLOB'); - return azurite.init({ l: location, silent: 'true', overwrite: 'true' }) - .then(() => { - // Make sure there is an existing container 'testcontainer' - const optionsContainer = { - method: 'PUT', - uri: `http://localhost:10000/devstoreaccount1/${containerName}?restype=container`, - body: '' - }; - const 
optionsBlockBlob = { - method: 'PUT', - headers: { - 'x-ms-blob-type': 'BlockBlob', - 'Content-Type': 'application/octet-stream' - }, - uri: `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobName}`, - body: 'abc123' - } - const optionsAppendBlob = { - method: 'PUT', - headers: { - 'x-ms-blob-type': 'AppendBlob', - 'Content-Type': 'application/octet-stream' - }, - uri: `http://localhost:10000/devstoreaccount1/${containerName}/${appendBlobName}`, - body: '' - } - const optionsPageBlob = { - method: 'PUT', - headers: { - 'x-ms-blob-type': 'PageBlob', - 'Content-Type': 'application/octet-stream' - }, - uri: `http://localhost:10000/devstoreaccount1/${containerName}/${pageBlobName}`, - body: '' - } - return rp(optionsContainer) - .then(() => { - return rp(optionsBlockBlob); - }) - .then(() => { - return rp(optionsAppendBlob); - }) - .then(() => { - return rp(optionsPageBlob); - }); - }); - }); - - after(() => { - return azurite.close(); - }); - - describe('PUT Block Blob', () => { - it('should fail to create a block due to missing container', () => { - return chai.request(url) - .put(`${urlPath}/DOESNOTEXISTS/blob`) - .set('x-ms-blob-type', 'BlockBlob') - .set('Content-Type', 'application/octet-stream') - .send('THIS IS CONTENT') - .catch((e) => { - e.should.have.status(404); - }) - }); - it('should fail to create a block due to wrong or unsupported blob type', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/blob`) - .set('x-ms-blob-type', 'NOTSUPPORTED') - .set('Content-Type', 'application/octet-stream') - .send('THIS IS CONTENT') - .catch((e) => { - e.should.have.status(400); - }); - }); - it('should create a simple block blob without meta headers', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/blob`) - .set('x-ms-blob-type', 'BlockBlob') - .set('Content-Type', 'application/octet-stream') - .send('abcdefghijklmn') - .then((res) => { - res.should.have.status(201); - }); - }); - }); - - describe('Put 
BlockList', () => { - const putBlockListBlobName = 'dir/putBlockListBlobName'; - it('should create a block blob from a list of blocks', () => { - const optionsBlockBlob = { - method: 'PUT', - headers: { - 'Content-Type': 'application/octet-stream', - 'Content-Length': 6 - }, - qs: { - 'comp': 'block', - 'blockid': 'AAAAAA==' - }, - uri: `http://localhost:10000/devstoreaccount1/${containerName}/${putBlockListBlobName}`, - body: 'AAAAAA' - } - - return rp(optionsBlockBlob) - .then(() => { - optionsBlockBlob.body = 'BBBBBB'; - optionsBlockBlob.qs.blockid = 'BBBBBB==' - return rp(optionsBlockBlob); - }) - .then(() => { - optionsBlockBlob.body = 'CCCCCC'; - optionsBlockBlob.qs.blockid = 'CCCCCC==' - return rp(optionsBlockBlob); - }) - .then(() => { - optionsBlockBlob.body = 'DDDDDD'; - optionsBlockBlob.qs.blockid = 'DDDDDD==' - return rp(optionsBlockBlob); - }) - .then(() => { - const xmlBody = - ` - - AAAAAA== - CCCCCC== - AAAAAA== - ` - return chai.request(url) - .put(`${urlPath}/${containerName}/${putBlockListBlobName}`) - .query({ comp: 'blocklist' }) - .send(xmlBody) - .then((res) => { - res.should.have.status(201); - }); - }); - }); - }); - - describe('Delete Blob', () => { - it('should delete an existing Block Blob', () => { - return createBlob('deleteblobtest', 'blob', 'abc123', 'BlockBlob') - .then(() => { - return chai.request(url) - .delete(`${urlPath}/deleteblobtest/blob`); - }) - .then((res) => { - res.should.have.status(202); - }) - }); - it('should fail when deleting a non-existant blob', () => { - return chai.request(url) - .delete(`${urlPath}/deleteblobtest/DOESNOTEXIST`) - .catch((e) => { - e.should.have.status(404); - }); - }); - it('should fail when deleting from a non-existant container', () => { - return chai.request(url) - .delete(`${urlPath}/DOESNOTEXIST/DOESNOTEXIST`) - .catch((e) => { - e.should.have.status(404); - }); - }); - }); - - describe('Append Blobs', () => { - it('should create an append blob', () => { - return chai.request(url) - 
.put(`${urlPath}/${containerName}/appendBlob`) - .set('x-ms-blob-type', 'AppendBlob') - .set('Content-Type', 'application/octet-stream') - .then((res) => { - res.should.have.status(201); - }); - }); - it('should append data to the append blob', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/appendBlob`) - .query({ comp: 'appendblock' }) - .set('x-ms-blob-type', 'AppendBlob') - .set('Content-Type', 'application/octet-stream') - .send('abcdefghi') - .then((res) => { - res.should.have.status(201); - }); - }); - it('should fail to create an append blob with size > 0', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/appendBlob`) - .set('x-ms-blob-type', 'AppendBlob') - .set('Content-Type', 'application/octet-stream') - .send('abcdefg') - .catch((e) => { - e.should.have.status(409); - }); - }); - }); - - describe('Page Blobs', () => { - it('should get an empty page list from the page blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .then((res) => { - res.should.have.status(200); - xml2js.Parser().parseString(res.text, function(err, result) { - expect(result.PageList).to.not.have.any.keys('PageRange'); - }); - }); - }); - it('should write data to the page blob range [0-511]', () => { - const bodydata = Buffer.alloc(512) - return chai.request(url) - .put(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'page' }) - .set('x-ms-page-write', 'update') - .set('x-ms-range', 'bytes=0-511') - .set('Content-Type', 'application/octet-stream') - .send(bodydata) - .then((res) => { - res.should.have.status(201); - }); - }); - it('should fail to write data to the page blob with an invalid range', () => { - const bodydata = Buffer.alloc(513) - return chai.request(url) - .put(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'page' }) - .set('x-ms-page-write', 'update') - .set('x-ms-range', 'bytes=0-512') - .set('Content-Type', 
'application/octet-stream') - .send(bodydata) - .catch((e) => { - e.should.have.status(416); - }); - }); - it('should fail to write data to the page blob with an invalid body length', () => { - const bodydata = Buffer.alloc(513) - return chai.request(url) - .put(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'page' }) - .set('x-ms-page-write', 'update') - .set('x-ms-range', 'bytes=0-511') - .set('Content-Type', 'application/octet-stream') - .send(bodydata) - .catch((e) => { - e.should.have.status(400); - }); - }); - it('should get the page range [0-511] from the page blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .then((res) => { - res.should.have.status(200); - xml2js.Parser().parseString(res.text, function(err, result) { - expect(result.PageList.PageRange.length).to.equal(1); - expect(result.PageList.PageRange[0]).to.deep.equal({"Start":["0"],"End":["511"]}); - }); - }); - }); - it('should get the page range [0-511] from the page blob within range [0-1023]', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .set('x-ms-range', 'bytes=0-1023') - .then((res) => { - res.should.have.status(200); - xml2js.Parser().parseString(res.text, function(err, result) { - expect(result.PageList.PageRange.length).to.equal(1); - expect(result.PageList.PageRange[0]).to.deep.equal({"Start":["0"],"End":["511"]}); - }); - }); - }); - it('should fail to get the page list from the page blob within an invalid range', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .set('x-ms-range', 'bytes=0-1095') - .catch((e) => { - e.should.have.status(416); - }); - }); - it('should write data to the page blob range [1024-1535]', () => { - const bodydata = Buffer.alloc(512) - return chai.request(url) - .put(`${urlPath}/${containerName}/${pageBlobName}`) - 
.query({ comp: 'page' }) - .set('x-ms-page-write', 'update') - .set('x-ms-range', 'bytes=1024-1535') - .set('Content-Type', 'application/octet-stream') - .send(bodydata) - .then((res) => { - res.should.have.status(201); - }); - }); - it('should get the page ranges [0-511],[1024-1535] from the page blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .then((res) => { - res.should.have.status(200); - xml2js.Parser().parseString(res.text, function(err, result) { - expect(result.PageList.PageRange.length).to.equal(2); - expect(result.PageList.PageRange[0]).to.deep.equal({"Start":["0"],"End":["511"]}); - expect(result.PageList.PageRange[1]).to.deep.equal({"Start":["1024"],"End":["1535"]}); - }); - }); - }); - it('should write data to the page blob range [512-1023]', () => { - const bodydata = Buffer.alloc(512) - return chai.request(url) - .put(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'page' }) - .set('x-ms-page-write', 'update') - .set('x-ms-range', 'bytes=512-1023') - .set('Content-Type', 'application/octet-stream') - .send(bodydata) - .then((res) => { - res.should.have.status(201); - }); - }); - it('should get the page range [0-1535] from the page blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .then((res) => { - res.should.have.status(200); - xml2js.Parser().parseString(res.text, function(err, result) { - expect(result.PageList.PageRange.length).to.equal(1); - expect(result.PageList.PageRange[0]).to.deep.equal({"Start":["0"],"End":["1535"]}); - }); - }); - }); - it('should clear data in the page blob range [512-1023]', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'page' }) - .set('x-ms-page-write', 'clear') - .set('x-ms-range', 'bytes=512-1023') - .then((res) => { - res.should.have.status(201); - }); - }); - it('should get the 
page ranges [0-511],[1024-1535] from the page blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${pageBlobName}`) - .query({ comp: 'pagelist' }) - .then((res) => { - res.should.have.status(200); - xml2js.Parser().parseString(res.text, function(err, result) { - expect(result.PageList.PageRange.length).to.equal(2); - expect(result.PageList.PageRange[0]).to.deep.equal({"Start":["0"],"End":["511"]}); - expect(result.PageList.PageRange[1]).to.deep.equal({"Start":["1024"],"End":["1535"]}); - }); - }); - }); - }); - - describe('GET Blob', () => { - it('should get the correct content of the Block Blob', () => { - const optionsBlockBlobGet = { - method: 'GET', - headers: { - 'Content-Type': 'application/octet-stream' - }, - uri: `http://localhost:10000/devstoreaccount1/${containerName}/${blockBlobName}` - } - return rp(optionsBlockBlobGet) - .then((res) => { - expect(res).to.be.equal('abc123'); - }); - }); - it('should get the correct type of the append blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${appendBlobName}`) - .then((res) => { - res.should.have.status(200); - res.should.have.header('x-ms-blob-type', 'AppendBlob'); - }); - }); - }); - - describe('Blob Metadata', () => { - it('should update an existing blob with metadata.', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/${blockBlobName}`) - .query({ comp: 'metadata' }) - .set('x-ms-meta-test1', 'value1') - .set('x-ms-meta-test2', 'value2') - .set('x-ms-meta-meta1', 'meta1Value') - .then((res) => { - res.should.have.status(200); - }); - }); - it('should get the correct metadata', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/${blockBlobName}`) - .query({ comp: 'metadata' }) - .then((res) => { - res.should.have.status(200); - res.should.have.header('x-ms-meta-test1', 'value1'); - res.should.have.header('x-ms-meta-test2', 'value2'); - res.should.have.header('x-ms-meta-meta1', 'meta1Value'); - 
res.should.have.header('Last-Modified'); - res.should.have.header('ETag'); - }); - }); - it('should fail to get metadata of a non-existant blob', () => { - return chai.request(url) - .get(`${urlPath}/${containerName}/BLOB_DOESNOTEXISTS`) - .query({ comp: 'metadata' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - it('should fail to get metadata of a blob in a non-existant container', () => { - return chai.request(url) - .get(`${urlPath}/CONTAINER_DOESNOTEXIST/BLOB_DOESNOTEXISTS`) - .query({ comp: 'metadata' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - }); - - describe('Blob Properties', () => { - it('should successfully set all system properties', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}/${blockBlobName}`) - .set('x-ms-blob-cache-control', 'true') - .set('x-ms-blob-content-type', 'ContentType') - .set('x-ms-blob-content-md5', 'ContentMD5') - .set('x-ms-blob-content-encoding', 'ContentEncoding') - .set('x-ms-blob-content-language', 'ContentLanguage') - .query({ comp: 'properties' }) - .then((res) => { - res.should.have.status(200); - }); - }); - it('should get all previously set system properties', () => { - return chai.request(url) - .head(`${urlPath}/${containerName}/${blockBlobName}`) - .then((res) => { - res.should.have.status(200); - res.should.have.header('ETag'); - res.should.have.header('Last-Modified'); - res.should.have.header('Content-Type', 'ContentType'); - res.should.have.header('Content-Encoding', 'ContentEncoding'); - res.should.have.header('Content-MD5', 'ContentMD5'); - res.should.have.header('Content-Language', 'ContentLanguage'); - res.should.have.header('Cache-Control', 'true'); - res.should.have.header('x-ms-blob-type', 'BlockBlob'); - }); - }); - }); -}); diff --git a/test/container.js b/test/container.js deleted file mode 100644 index cfc3e6b7e..000000000 --- a/test/container.js +++ /dev/null @@ -1,186 +0,0 @@ -const chai = require('chai'), - chaiHttp = require('chai-http'), - 
should = chai.should(), - BbPromise = require('bluebird'), - fs = BbPromise.promisifyAll(require("fs-extra")), - Azurite = require('./../lib/AzuriteBlob'), - rp = require('request-promise'), - path = require('path'); - -chai.use(chaiHttp); - -const containerName = 'containertestcontainer'; -const propContainer = 'propTestcontainer'; -const url = 'http://localhost:10000'; -const urlPath = '/devstoreaccount1'; - -describe('Container HTTP API', () => { - const azurite = new Azurite(); - - before(() => { - const location = path.join('.', process.env.AZURITE_LOCATION, 'CONTAINER'); - return azurite.init({ l: location, silent: 'true', overwrite: 'true' }) - .then(() => { - // Make sure there is an existing container 'metadatatestcontainer' - const optionsContainer = { - method: 'PUT', - uri: `http://localhost:10000/devstoreaccount1/${propContainer}?restype=container`, - body: '' - }; - return rp(optionsContainer); - }); - }); - - after(() => { - return azurite.close(); - }); - - - - describe('PUT Simple Container', () => { - it('should create a container', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}`) - .query({ restype: 'container' }) - .then((res) => { - res.should.have.status(201); - }); - }); - it('and a second with the same name that fails', () => { - return chai.request(url) - .put(`${urlPath}/${containerName}`) - .query({ restype: 'container' }) - .catch((e) => { - e.should.have.status(409); - }) - }); - }); - describe('DELETE Simple Container', () => { - it('successfully deletes the container', () => { - return chai.request(url) - .delete(`${urlPath}/${containerName}`) - .query({ restype: 'container' }) - .then((res) => { - res.should.have.status(202); - }); - }); - it('deleting a non-existant container fails', () => { - return chai.request(url) - .delete(`${urlPath}/DOESNOTEXIST`) - .query({ restype: 'container' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - }); - describe('Container Metadata', () => { - it('should 
update an existing container with metadata.', () => { - return chai.request(url) - .put(`${urlPath}/${propContainer}`) - .query({ restype: 'container', comp: 'metadata' }) - .set('x-ms-meta-test1', 'value1') - .set('x-ms-meta-test2', 'value2') - .set('x-ms-meta-meta1', 'meta1Value') - .then((res) => { - res.should.have.status(200); - }); - }); - it('should get the correct metadata. (GET)', () => { - return chai.request(url) - .get(`${urlPath}/${propContainer}`) - .query({ restype: 'container', comp: 'metadata' }) - .then((res) => { - res.should.have.status(200); - res.should.have.header('x-ms-meta-test1', 'value1'); - res.should.have.header('x-ms-meta-test2', 'value2'); - res.should.have.header('x-ms-meta-meta1', 'meta1Value'); - res.should.have.header('Last-Modified'); - res.should.have.header('ETag'); - }); - }); - it('should get the correct metadata. (HEAD)', () => { - return chai.request(url) - .head(`${urlPath}/${propContainer}`) - .query({ restype: 'container', comp: 'metadata' }) - .then((res) => { - res.should.have.status(200); - res.should.have.header('x-ms-meta-test1', 'value1'); - res.should.have.header('x-ms-meta-test2', 'value2'); - res.should.have.header('x-ms-meta-meta1', 'meta1Value'); - res.should.have.header('Last-Modified'); - res.should.have.header('ETag'); - }); - }); - it('should fail to get metadata of a non-existant container (GET)', () => { - return chai.request(url) - .get(`${urlPath}/CONTAINER_DOESNOTEXIST`) - .query({ restype: 'container', comp: 'metadata' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - it('should fail to get metadata of a non-existant container (HEAD)', () => { - return chai.request(url) - .head(`${urlPath}/CONTAINER_DOESNOTEXIST`) - .query({ restype: 'container', comp: 'metadata' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - }); - describe('Container System Properties', () => { - it('should update an existing container with metadata.', () => { - return chai.request(url) - 
.put(`${urlPath}/${propContainer}`) - .query({ restype: 'container', comp: 'metadata' }) - .set('x-ms-meta-test1', 'value1') - .set('x-ms-meta-test2', 'value2') - .set('x-ms-meta-meta1', 'meta1Value') - .then((res) => { - res.should.have.status(200); - }); - }); - it('should get the correct metadata. (GET)', () => { - return chai.request(url) - .get(`${urlPath}/${propContainer}`) - .query({ restype: 'container' }) - .then((res) => { - res.should.have.status(200); - res.should.have.header('x-ms-meta-test1', 'value1'); - res.should.have.header('x-ms-meta-test2', 'value2'); - res.should.have.header('x-ms-meta-meta1', 'meta1Value'); - res.should.have.header('Last-Modified'); - res.should.have.header('ETag'); - }); - }); - it('should get the correct metadata. (HEAD)', () => { - return chai.request(url) - .head(`${urlPath}/${propContainer}`) - .query({ restype: 'container' }) - .then((res) => { - res.should.have.status(200); - res.should.have.header('x-ms-meta-test1', 'value1'); - res.should.have.header('x-ms-meta-test2', 'value2'); - res.should.have.header('x-ms-meta-meta1', 'meta1Value'); - res.should.have.header('Last-Modified'); - res.should.have.header('ETag'); - }); - }); - it('should fail to get metadata of a non-existant container (GET)', () => { - return chai.request(url) - .get(`${urlPath}/CONTAINER_DOESNOTEXIST`) - .query({ restype: 'container' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - it('should fail to get metadata of a non-existant container (HEAD)', () => { - return chai.request(url) - .head(`${urlPath}/CONTAINER_DOESNOTEXIST`) - .query({ restype: 'container' }) - .catch((e) => { - e.should.have.status(404); - }); - }); - }); -}); \ No newline at end of file diff --git a/test/validation.js b/test/validation.js deleted file mode 100644 index e5cca214f..000000000 --- a/test/validation.js +++ /dev/null @@ -1,50 +0,0 @@ -const QueueName = require('../lib/validation/queue/QueueName'), - AError = require('../lib/core/AzuriteError') - 
ErrorCodes = require('../lib/core/ErrorCodes') - expect = require('chai').expect; - -describe('validation', () => { - describe('QueueName', () => { - const createQueueNameRequest = (queueName) => { return { request: { queueName } } }; - - it('should throw out of range if name is less than three characters', () => { - expect(() => QueueName.validate(createQueueNameRequest(''))).to.throw(AError, ErrorCodes.OutOfRangeInput); - expect(() => QueueName.validate(createQueueNameRequest('a'))).to.throw(AError, ErrorCodes.OutOfRangeInput); - expect(() => QueueName.validate(createQueueNameRequest('aa'))).to.throw(AError, ErrorCodes.OutOfRangeInput); - expect(() => QueueName.validate(createQueueNameRequest('aaa'))).not.to.throw(); - }); - - it('should throw out of range if name is greater than sixty three characters', () => { - const sixtyThreeCharacterStringName = '012345678901234567890123456789012345678901234567890123456789012'; - - expect(() => QueueName.validate(createQueueNameRequest(sixtyThreeCharacterStringName))).not.to.throw(); - expect(() => QueueName.validate(createQueueNameRequest(sixtyThreeCharacterStringName + '3'))).to.throw(AError, ErrorCodes.OutOfRangeInput); - expect(() => QueueName.validate(createQueueNameRequest(sixtyThreeCharacterStringName + '34'))).to.throw(AError, ErrorCodes.OutOfRangeInput); - }); - - it('should throw invalid input if name starts with a dash', () => { - expect(() => QueueName.validate(createQueueNameRequest("-queue"))).to.throw(AError, ErrorCodes.InvalidInput); - expect(() => QueueName.validate(createQueueNameRequest("-queue-name"))).to.throw(AError, ErrorCodes.InvalidInput); - }); - - it('should throw invalid input if name ends with a dash', () => { - expect(() => QueueName.validate(createQueueNameRequest("queue-"))).to.throw(AError, ErrorCodes.InvalidInput); - expect(() => QueueName.validate(createQueueNameRequest("queue-name-"))).to.throw(AError, ErrorCodes.InvalidInput); - }); - - it('should throw invalid input if contians two 
consecutive dashes', () => { - expect(() => QueueName.validate(createQueueNameRequest("queue--name"))).to.throw(AError, ErrorCodes.InvalidInput); - }); - - it('should throw invalid input if contians anything except alphanumeric characters and dashes', () => { - expect(() => QueueName.validate(createQueueNameRequest("queue-name"))).not.to.throw(); - expect(() => QueueName.validate(createQueueNameRequest("queue1"))).not.to.throw(); - expect(() => QueueName.validate(createQueueNameRequest("QUEUE-name-1"))).not.to.throw(); - expect(() => QueueName.validate(createQueueNameRequest("queue_name"))).to.throw(AError, ErrorCodes.InvalidInput); - expect(() => QueueName.validate(createQueueNameRequest("queue name"))).to.throw(AError, ErrorCodes.InvalidInput); - expect(() => QueueName.validate(createQueueNameRequest("queue~name"))).to.throw(AError, ErrorCodes.InvalidInput); - expect(() => QueueName.validate(createQueueNameRequest("queue@name"))).to.throw(AError, ErrorCodes.InvalidInput); - expect(() => QueueName.validate(createQueueNameRequest("queue:name"))).to.throw(AError, ErrorCodes.InvalidInput); - }); - }); -});