diff --git a/CHANGELOG.md b/CHANGELOG.md index 8d9c2f64..ed29179a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -325,5 +325,9 @@ All notable changes to this project will be documented in this file. Breaking ch ### Added - Adds new query execution option `hydrate`. When a GSI uses a `KEYS_ONLY` projection, the `hydrate` option will perform the query and then a subsequent `batchGet` to "hydrate" the results. +### Changed +- The execution option `limit` is now better optimized for cases where filters might result in index "misses". The `limit` option used to use the `Limit` parameter, but this could result in requiring more requests when user-applied filters prevented any items from being returned in a single request. + ### Fixed -- A common issue amongst new users, was unexpected errors when using a terminal method twice on a query chain. This would often come up when a user called `.params()` to log out parameters and then call `.go()` on the same chain. The fix was to prevent duplicative side effects from occurring on each subsequent terminal method call. \ No newline at end of file +- A common issue amongst new users was unexpected errors when using a terminal method twice on a query chain. This would often come up when a user called `.params()` to log out parameters and then called `.go()` on the same chain. The fix was to prevent duplicative side effects from occurring on each subsequent terminal method call. Addresses gh issue #239. 
+ diff --git a/src/entity.js b/src/entity.js index 352ac505..45f76b47 100644 --- a/src/entity.js +++ b/src/entity.js @@ -382,9 +382,7 @@ class Entity { let response = await this._exec(MethodTypes.batchWrite, params, config); if (validations.isFunction(config.parse)) { let parsed = config.parse(config, response); - if (parsed) { - results.push(parsed); - } + results.push(parsed.data); } else { let {unprocessed} = this.formatBulkWriteResponse(response, config); for (let u of unprocessed) { @@ -432,7 +430,8 @@ class Entity { await Promise.all(operation.map(async params => { let response = await this._exec(MethodTypes.batchGet, params, config); if (validations.isFunction(config.parse)) { - resultsAll.push(config.parse(config, response)); + const parsed = config.parse(config, response); + resultsAll.push(parsed.data); } else { this.applyBulkGetResponseFormatting({ orderMaintainer, @@ -473,23 +472,41 @@ class Entity { ExclusiveStartKey = undefined; } let pages = this._normalizePagesValue(config.pages); - let max = this._normalizeLimitValue(config.limit); + let configLimit = this._normalizeNumberOptionsValue('limit', config.limit); + let configCount = this._normalizeNumberOptionsValue('count', config.count); + let max = this._safeMinimum(configLimit, configCount); let iterations = 0; let count = 0; let hydratedUnprocessed = []; const shouldHydrate = config.hydrate && method === MethodTypes.query; do { - let limit = max === undefined - ? parameters.Limit - : max - count; - let response = await this._exec(method, { ExclusiveStartKey, ...parameters, Limit: limit }, config); - ExclusiveStartKey = response.LastEvaluatedKey; + let remainingCount = configCount !== undefined + ? max - count + : undefined; + + let limit = configLimit !== undefined + ? 
max - count + : undefined; + + let params = { ExclusiveStartKey, ...parameters }; + + if (config.raw || (limit !== undefined && remainingCount === undefined)) { + params.Limit = limit; + } + + let response = await this._exec(method, params, config); + response = this.formatResponse(response, parameters.IndexName, { ...config, + count: remainingCount, includeKeys: shouldHydrate || config.includeKeys, ignoreOwnership: shouldHydrate || config.ignoreOwnership, + _returnLastEvaluatedKeyRaw: true, }); + ExclusiveStartKey = response.lastEvaluatedKey; + delete response.lastEvaluatedKey; + if (config.raw) { return response; } else if (config._isCollectionQuery) { @@ -655,15 +672,35 @@ class Entity { } } + _getLastEvaluatedKeyFromItem({indexName = TableIndex, item}) { + const indexFields = this.model.translations.keys[indexName]; + const tableIndexFields = this.model.translations.keys[TableIndex]; + const lastEvaluatedKey = { + [indexFields.pk]: item[indexFields.pk], + [tableIndexFields.pk]: item[tableIndexFields.pk], + } + if (indexFields.sk && item[indexFields.sk]) { + lastEvaluatedKey[indexFields.sk] = item[indexFields.sk] + } + if (tableIndexFields.sk && item[tableIndexFields.sk]) { + lastEvaluatedKey[tableIndexFields.sk] = item[tableIndexFields.sk] + } + + return lastEvaluatedKey; + } + formatResponse(response, index, config = {}) { let stackTrace; if (!config.originalErr) { stackTrace = new e.ElectroError(e.ErrorCodes.AWSError); } + let lastEvaluatedKey = response.LastEvaluatedKey; try { let results = {}; if (validations.isFunction(config.parse)) { - results = config.parse(config, response); + const parsed = config.parse(config, response); + results = parsed.data; + lastEvaluatedKey = parsed.lastEvaluatedKey; } else if (config.raw && !config._isPagination) { if (response.TableName) { results = {}; @@ -683,12 +720,27 @@ class Entity { results = null; } } else if (response.Items) { + let size = typeof config.count === 'number' ? 
config.count : response.Items.length; + let count = 0; + let lastItem; results = []; - for (let item of response.Items) { + for (let i = 0; i < response.Items.length; i++) { + const item = { ...response.Items[i] }; if (config.ignoreOwnership || this.ownsItem(item)) { let record = this.model.schema.formatItemForRetrieval(item, config); if (Object.keys(record).length > 0) { + count = count + 1; + if (count > size) { + if (lastItem) { + lastEvaluatedKey = this._getLastEvaluatedKeyFromItem({ + indexName: index, + item: lastItem, + }); + } + break; + } results.push(record); + lastItem = response.Items[i]; } } } @@ -704,8 +756,11 @@ class Entity { } } - if (config._isPagination || response.LastEvaluatedKey) { - const nextPage = this._formatReturnPager(config, response.LastEvaluatedKey); + if (config._isPagination || lastEvaluatedKey) { + const nextPage = this._formatReturnPager(config, lastEvaluatedKey); + if (config._returnLastEvaluatedKeyRaw) { + return { cursor: nextPage || null, data: results, lastEvaluatedKey }; + } return { cursor: nextPage || null, data: results }; } @@ -818,16 +873,31 @@ class Entity { return value; } - _normalizeLimitValue(value) { + _normalizeNumberOptionsValue(option, value) { if (value !== undefined) { value = parseInt(value); if (isNaN(value) || value < 1) { - throw new e.ElectroError(e.ErrorCodes.InvalidLimitOption, "Query option 'limit' must be of type 'number' and greater than zero."); + throw new e.ElectroError(e.ErrorCodes.InvalidLimitOption, `Query option '${option}' must be of type 'number' and greater than zero.`); } } return value; } + _safeMinimum(...values) { + let eligibleNumbers = []; + for (let value of values) { + if (typeof value === 'number') { + eligibleNumbers.push(value); + } + } + + if (eligibleNumbers.length) { + return Math.min(...eligibleNumbers); + } + + return undefined; + } + _createKeyDeconstructor(prefixes = {}, labels = [], attributes = {}) { let {prefix, isCustom, postfix} = prefixes; let names = []; @@ -882,65 
+952,6 @@ class Entity { } } - // _deconstructKeys(index, keyType, key, backupFacets = {}) { - // if (typeof key !== "string" || key.length === 0) { - // return null; - // } - // - // let accessPattern = this.model.translations.indexes.fromIndexToAccessPattern[index]; - // let {prefix, isCustom} = this.model.prefixes[index][keyType]; - // let {facets} = this.model.indexes[accessPattern][keyType]; - // let names = []; - // let types = []; - // let pattern = `^${this._regexpEscape(prefix)}`; - // let labels = this.model.facets.labels[index][keyType] || []; - // for (let {name, label} of labels) { - // let attr = this.model.schema.attributes[name]; - // if (attr) { - // if (isCustom) { - // pattern += `${this._regexpEscape(label === undefined ? "" : label)}(.+)`; - // } else { - // pattern += `#${this._regexpEscape(label === undefined ? name : label)}_(.+)`; - // } - // names.push(name); - // types.push(attr.type); - // } - // } - // pattern += "$"; - // let regex = new RegExp(pattern, "i"); - // let match = key.match(regex); - // let results = {}; - // if (match) { - // for (let i = 0; i < names.length; i++) { - // let key = names[i]; - // let value = match[i+1]; - // let type = types[i]; - // switch (type) { - // case "number": - // value = parseFloat(value); - // break; - // case "boolean": - // value = value === "true"; - // break; - // } - // results[key] = value; - // } - // } else { - // if (Object.keys(backupFacets || {}).length === 0) { - // // this can occur when a scan is performed but returns no results given the current filters or record timing - // return {}; - // } - // for (let facet of facets) { - // if (backupFacets[facet] === undefined) { - // throw new e.ElectroError(e.ErrorCodes.LastEvaluatedKey, 'LastEvaluatedKey contains entity that does not match the entity used to query. 
Use {pager: "raw"} query option.'); - // } else { - // results[facet] = backupFacets[facet]; - // } - // } - // } - // return results; - // } - _deconstructIndex({index = TableIndex, keys = {}} = {}) { const hasIndex = !!this.model.translations.keys[index]; if (!hasIndex) { @@ -1159,6 +1170,10 @@ class Entity { config.params.Limit = option.limit; } + if (typeof option.count === 'number') { + config.count = option.count; + } + if (validations.isStringHasLength(option.table)) { config.params.TableName = option.table; config.table = option.table; @@ -1664,8 +1679,6 @@ class Entity { const { updatedKeys, setAttributes, indexKey } = this._getPutKeys(pk, sk && sk.facets, upsert.data); const upsertAttributes = this.model.schema.translateToFields(setAttributes); const keyNames = Object.keys(indexKey); - // update.set(this.identifiers.entity, this.getName()); - // update.set(this.identifiers.version, this.getVersion()); for (const field of [...Object.keys(upsertAttributes), ...Object.keys(updatedKeys)]) { const value = u.getFirstDefined(upsertAttributes[field], updatedKeys[field]); if (!keyNames.includes(field)) { diff --git a/src/service.js b/src/service.js index dc21f3da..7da42495 100644 --- a/src/service.js +++ b/src/service.js @@ -305,19 +305,26 @@ class Service { cleanseRetrievedData(index = TableIndex, entities, data = {}, config = {}) { if (config.raw) { - return data; + if (config._returnLastEvaluatedKeyRaw) { + return {data, lastEvaluatedKey: data.LastEvaluatedKey}; + } else { + return { data }; + } } const identifiers = getEntityIdentifiers(entities); data.Items = data.Items || []; const results = {}; + let size = typeof config.count === 'number' ? 
config.count : data.Items.length; + let count = 0; + let lastEvaluatedKey = data.LastEvaluatedKey; for (let {alias} of identifiers) { results[alias] = []; } for (let i = 0; i < data.Items.length; i++) { - const record = data.Items[i]; + const record = { ...data.Items[i] }; if (!record) { continue; @@ -333,7 +340,7 @@ class Service { let formatted; if (config.hydrate) { formatted = { - data: record // entities[entityAlias]._formatKeysToItem(index, record), + data: record }; } else { formatted = entities[entityAlias].formatResponse({Item: record}, index, { @@ -343,9 +350,29 @@ class Service { }); } - results[entityAlias].push(formatted.data); + if (formatted.data) { + count = count + 1; + if (count > size) { + lastEvaluatedKey = entities[entityAlias]._getLastEvaluatedKeyFromItem({ + indexName: index, + item: data.Items[i - 1], + }); + break; + } + results[entityAlias].push(formatted.data); + } + } + + if (config._returnLastEvaluatedKeyRaw) { + return { + data: results, + lastEvaluatedKey + }; + } else { + return { + data: results, + } } - return results; } findKeyOwner(lastEvaluatedKey) { @@ -409,7 +436,7 @@ class Service { // expressions, // DynamoDB doesnt return what I expect it would when provided with these entity filters parse: (options, data) => { if (options.raw) { - return data; + return { data }; } return this.cleanseRetrievedData(index, entities, data, options); }, diff --git a/src/transaction.js b/src/transaction.js index 0ee6df91..29eabd4e 100644 --- a/src/transaction.js +++ b/src/transaction.js @@ -119,29 +119,43 @@ function createTransaction(options) { ...options, parse: (options, data) => { if (options.raw) { - return data; + return { + data + }; } else if (data.canceled) { canceled = true; - return cleanseCanceledData(TableIndex, getEntities(), data, { + const cleansed = cleanseCanceledData(TableIndex, getEntities(), data, { ...options, _isTransaction: true, _paramItems: paramItems, }); + + return { + data: cleansed, + } } else if (data.Responses) { 
- return cleanseTransactionData(TableIndex, getEntities(), { + const cleansed = cleanseTransactionData(TableIndex, getEntities(), { Items: data.Responses.map(response => response.Item) }, { ...options, _isTransaction: true, _paramItems: paramItems, }); + + return { + data: cleansed, + } } else { - return new Array(paramItems ? paramItems.length : 0).fill({ + const items = new Array(paramItems ? paramItems.length : 0).fill({ item: null, code: 'None', rejected: false, message: undefined, }); + + return { + data: items, + } } } }); diff --git a/test/connected.page.spec.js b/test/connected.page.spec.js index 7975d8eb..25d1f223 100644 --- a/test/connected.page.spec.js +++ b/test/connected.page.spec.js @@ -305,7 +305,7 @@ describe("Page", () => { } ]; for (const test of paginationTests) { - it('should paginate through all records for a given query', async () => { + it(`should paginate through all records for a given ${test.type} operation using limit`, async () => { const pages = 'all'; const limit = 2; let results = []; @@ -322,9 +322,27 @@ describe("Page", () => { test.output, )).to.not.throw; }).timeout(10000); + + it(`should paginate through all records for a given ${test.type} operation using count`, async () => { + const pages = 'all'; + const count = 2; + let results = []; + let cursor = null; + do { + const response = test.type === 'query' + ? 
await tasks.query[test.input.index](test.input.key).go({cursor, count, pages}) + : await tasks.scan.go({cursor, count, pages}); + results = results.concat(response.data); + cursor = response.cursor; + } while (cursor !== null); + expect(() => Tasks.compareTasks( + results, + test.output, + )).to.not.throw; + }).timeout(50000); } - it("Paginate without overlapping values", async () => { + it("Paginate without overlapping values using limit", async () => { let limit = 30; let count = 0; let cursor = null; @@ -349,7 +367,32 @@ describe("Page", () => { expect(all).to.have.length(keys.size); }).timeout(10000); - it("Paginate without overlapping values with raw response", async () => { + it("Paginate without overlapping values using count", async () => { + let limit = 30; + let count = 0; + let cursor = null; + let all = []; + let keys = new Set(); + do { + count++; + let [next, items] = await tasks.query.assigned({employee: Tasks.employees[0]}) + .go({cursor, count: limit}) + .then(res => [res.cursor, res.data]); + if (next && count > 0) { + const deserialized = cursorFormatter.deserialize(next); + expect(deserialized).to.have.keys(["gsi2pk", "gsi2sk", "pk", "sk"]); + } + expect(items.length <= limit).to.be.true; + for (let item of items) { + keys.add(item.task + item.project + item.employee); + all.push(item); + } + cursor = next; + } while(cursor !== null); + expect(all).to.have.length(keys.size); + }).timeout(10000); + + it("Paginate without overlapping values with raw response using limit", async () => { let limit = 30; let count = 0; let cursor = null; @@ -375,7 +418,7 @@ describe("Page", () => { } while(cursor !== null); }).timeout(10000); - it("Paginate without overlapping values with pager='raw'", async () => { + it("Paginate without overlapping values with pager='raw' using limit", async () => { let limit = 30; let count = 0; let cursor = null; @@ -398,6 +441,29 @@ describe("Page", () => { } while(cursor !== null); }).timeout(10000); + it("Paginate without 
overlapping values with pager='raw' using count", async () => { + let limit = 30; + let count = 0; + let cursor = null; + let all = []; + + do { + count++; + let keys = new Set(); + let [next, items] = await tasks.query.projects({project: Tasks.projects[0]}).go({count: limit, cursor, pager: "raw"}).then(res => [res.cursor, res.data]); + if (next !== null && count > 1) { + expect(next).to.have.keys(["sk", "pk", "gsi1sk", "gsi1pk"]); + } + expect(items.length <= limit).to.be.true; + for (let item of items) { + keys.add(item.task + item.project + item.employee); + all.push(item); + } + expect(items.length).to.equal(keys.size); + cursor = next; + } while(cursor !== null); + }).timeout(10000); + // it("Should not accept incomplete page composite attributes", async () => { // let tests = [ // { @@ -444,7 +510,7 @@ describe("Page", () => { // }).timeout(10000); it("Should paginate and return raw results", async () => { - let results = await tasks.scan.go({raw: true}); + let results = await tasks.scan.go({ raw: true }); expect(results).to.have.keys(['cursor', 'data']); expect(results.data.Items).to.not.be.undefined expect(results.data.Items).to.be.an("array"); @@ -657,9 +723,54 @@ describe("Page", () => { } }); - it("entity query should only count entities belonging to the collection entities to fulfill 'limit' option requirements", async () => { + it("entity query should continue to query until 'count' option is reached", async () => { const ExclusiveStartKey = {key: 'hi'}; const [one, two, three, four, five, six] = tasks.data; + const {client, calls} = createClient({ + mockResponses: [ + { + Items: [one, two, three], + LastEvaluatedKey: ExclusiveStartKey, + }, + { + Items: [four, five, six], + LastEvaluatedKey: ExclusiveStartKey, + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey, + }, + { + Items: [], + LastEvaluatedKey: undefined, + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey, + } + ] + }); + const pages = 3; + const limit = 5; + const entity = 
new Tasks(TasksModel, {client, table}); + const results = await entity.query.task({task: "my_task"}).go({pages, count: limit}).then(res => res.data); + // this behaves differently + expect(results).to.be.an("array").with.length(limit); + expect(calls).to.have.length(2); + for (let i = 0; i < calls.length; i++) { + const call = calls[i]; + if (i === 0) { + expect(call.ExclusiveStartKey).to.be.undefined; + } else { + expect(call.ExclusiveStartKey.key).to.equal('hi'); + expect(call.ExclusiveStartKey.key === ExclusiveStartKey.key).to.be.true; + } + } + }); + + it("entity query should only count entities belonging to the collection entities to fulfill 'limit' option requirements", async () => { + const ExclusiveStartKey = { key: 'hi' }; + const [one, two, three, four, five, six] = tasks.data; const {client, calls} = createClient({ mockResponses: [ { @@ -701,6 +812,50 @@ describe("Page", () => { } }); + it("entity query should only count entities belonging to the collection entities to fulfill 'count' option requirements", async () => { + const ExclusiveStartKey = { key: 'hi' }; + const [one, two, three, four, five, six] = tasks.data; + const {client, calls} = createClient({ + mockResponses: [ + { + Items: [{}, {}, one, two, three, {}, {}], + LastEvaluatedKey: ExclusiveStartKey, + }, + { + Items: [four, five, six, {}], + LastEvaluatedKey: ExclusiveStartKey, + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey, + }, + { + Items: [], + LastEvaluatedKey: undefined, + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey, + } + ] + }); + const pages = 3; + const limit = 5; + const entity = new Tasks(TasksModel, {client, table}); + const results = await entity.query.task({task: "my_task"}).go({pages, count: limit}).then(res => res.data); + expect(results).to.be.an("array").with.length(limit); + expect(calls).to.have.length(2); + for (let i = 0; i < calls.length; i++) { + const call = calls[i]; + if (i === 0) { + expect(call.ExclusiveStartKey).to.be.undefined; + } 
else { + expect(call.ExclusiveStartKey.key).to.equal('hi'); + expect(call.ExclusiveStartKey.key === ExclusiveStartKey.key).to.be.true; + } + } + }); + it("collection query should continue to query until LastEvaluatedKey is not returned", async () => { const ExclusiveStartKey = {key: 'hi'}; const {client, calls} = createClient({ @@ -844,6 +999,58 @@ describe("Page", () => { } }); + it("collection query should continue to query until 'count' option is reached", async () => { + const ExclusiveStartKey = {key: 'hi'}; + const tasks = new Tasks(makeTasksModel(), {client, table}); + const tasks2 = new Tasks(makeTasksModel(), {client, table}); + + await tasks.load(10); + await tasks2.load(10); + const [one, two, three, four, five, six] = tasks.data; + const [seven, eight, nine, ten] = tasks2.data; + const created = createClient({ + mockResponses: [ + { + Items: [one, two, three, seven, eight, nine], + LastEvaluatedKey: ExclusiveStartKey + }, + { + Items: [four, five, six, ten], + LastEvaluatedKey: ExclusiveStartKey + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey + }, + { + Items: [], + LastEvaluatedKey: undefined + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey, + } + ] + }); + const service = new Service({tasks, tasks2}, {client: created.client, table}); + const pages = 3; + const limit = 9; + const employee = "my_employee"; + const results = await service.collections.assignments({employee}).go({pages, count: limit}).then(res => res.data); + expect(results.tasks).to.be.an("array").with.length(6); + expect(results.tasks2).to.be.an("array").with.length(3); + expect(created.calls).to.have.length(2); + for (let i = 0; i < created.calls.length; i++) { + const call = created.calls[i]; + if (i === 0) { + expect(call.ExclusiveStartKey).to.be.undefined; + } else { + expect(call.ExclusiveStartKey.key).to.equal('hi'); + expect(call.ExclusiveStartKey.key === ExclusiveStartKey.key).to.be.true; + } + } + }); + it("collection query should only count entities 
belonging to the collection entities to fulfill 'limit' option requirements", async () => { const ExclusiveStartKey = {key: 'hi'}; const tasks = new Tasks(makeTasksModel(), {client, table}); @@ -896,7 +1103,59 @@ describe("Page", () => { } }); - it("should automatically paginate all results with query", async () => { + it("collection query should only count entities belonging to the collection entities to fulfill 'count' option requirements", async () => { + const ExclusiveStartKey = {key: 'hi'}; + const tasks = new Tasks(makeTasksModel(), {client, table}); + const tasks2 = new Tasks(makeTasksModel(), {client, table}); + + await tasks.load(10); + await tasks2.load(10); + const [three, four, five, six] = tasks.data; + const [seven, eight, nine] = tasks2.data; + const created = createClient({ + mockResponses: [ + { + Items: [{}, {}, three, seven, eight, nine, {}, {}], + LastEvaluatedKey: ExclusiveStartKey + }, + { + Items: [four, five, six, {}], + LastEvaluatedKey: ExclusiveStartKey + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey + }, + { + Items: [], + LastEvaluatedKey: undefined + }, + { + Items: [], + LastEvaluatedKey: ExclusiveStartKey, + } + ] + }); + const service = new Service({tasks, tasks2}, {client: created.client, table}); + const pages = 3; + const limit = 6; + const employee = "my_employee"; + const results = await service.collections.assignments({employee}).go({pages, count: limit}).then(res => res.data); + expect(results.tasks).to.be.an("array").with.length(3); + expect(results.tasks2).to.be.an("array").with.length(3); + expect(created.calls).to.have.length(2); + for (let i = 0; i < created.calls.length; i++) { + const call = created.calls[i]; + if (i === 0) { + expect(call.ExclusiveStartKey).to.be.undefined; + } else { + expect(call.ExclusiveStartKey.key).to.equal('hi'); + expect(call.ExclusiveStartKey.key === ExclusiveStartKey.key).to.be.true; + } + } + }); + + it("should automatically paginate all results with query with 'limit'", async 
() => { const project = Tasks.projects[0]; const occurrences = tasks.occurrences.projects[project]; const overLimit = occurrences + 10; @@ -909,6 +1168,19 @@ describe("Page", () => { expect(limited).to.have.length(underLimit); }); + it("should automatically paginate all results with query with 'count'", async () => { + const project = Tasks.projects[0]; + const occurrences = tasks.occurrences.projects[project]; + const overLimit = occurrences + 10; + const underLimit = occurrences - 10; + const results = await tasks.query.projects({project}).go({count: overLimit}).then(res => res.data); + const limited = await tasks.query.projects({project}).go({count: underLimit}).then(res => res.data); + const loaded = tasks.filterLoaded({project}); + expect(() => Tasks.compareTasks(results, loaded)).to.not.throw; + expect(results).to.have.length(occurrences); + expect(limited).to.have.length(underLimit); + }); + // it("should automatically paginate all results with collection", async () => { // const employee = Tasks.employees[0]; // const limit1 = tasks.occurrences.employees[employee]; @@ -948,7 +1220,7 @@ describe("Page", () => { const occurrences = tasks.occurrences.employees[employee]; const pages = 2; const limit = Math.floor(occurrences / 4); - const results = await tasks.query.assigned({employee}).go({pages, params: {Limit: limit}}).then(res => res.data); + const results = await tasks.query.assigned({ employee }).go({ pages, params: { Limit: limit }}).then(res => res.data); expect(limit).to.be.greaterThan(0); expect(occurrences).to.be.greaterThan(limit * pages); expect(results).to.have.length(limit * pages); @@ -1069,7 +1341,7 @@ describe("Page", () => { it("should return the response received by options.parse if value is not array", async () => { let wasParsed = false; let parseArgs = {}; - const parserResponse = {value: true}; + const parserResponse = { value: 12345 }; const project = Tasks.projects[0]; const limit = 1; const results = await tasks.query @@ -1079,15 
+1351,13 @@ describe("Page", () => { parse: (config, response) => { wasParsed = true; parseArgs = response; - return parserResponse; + return { data: parserResponse }; } }); expect(wasParsed).to.be.true; - expect(results.data === parserResponse).to.be.true; + expect(results.data).to.deep.equal(parserResponse); expect(parseArgs.Items).to.be.an("array"); expect(parseArgs.LastEvaluatedKey).to.have.keys("pk", "sk", "gsi1sk", "gsi1pk"); - expect(parseArgs.Count).to.equal(1); - expect(parseArgs.ScannedCount).to.equal(1); }); it("should not clobber a user defined ExclusiveStartKey", async () => { diff --git a/test/connected.service.spec.js b/test/connected.service.spec.js index cd9c3cfe..5c77592c 100644 --- a/test/connected.service.spec.js +++ b/test/connected.service.spec.js @@ -1,9 +1,8 @@ const sleep = async (ms) => new Promise((resolve) => setTimeout(resolve, ms)); process.env.AWS_NODEJS_CONNECTION_REUSE_ENABLED = 1; -const { Entity, clauses } = require("../src/entity"); +const { Entity } = require("../src/entity"); const { Service } = require("../src/service"); const { expect } = require("chai"); -const moment = require("moment"); const uuid = require("uuid").v4; const DynamoDB = require("aws-sdk/clients/dynamodb"); const table = "electro"; @@ -862,10 +861,11 @@ describe("Entities with custom identifiers and versions", () => { let collectionA = await service.collections .collectionA({prop1}) .where(({prop2}, {eq}) => eq(prop2, "prop2Value")) - .go({raw: true}) + .go({ raw: true }) .then(res => res.data) - .then((data) => ({success: true, data})) - .catch(err => ({success: false, err})); + .then((data) => ({ success: true, data })) + .catch(err => ({ success: false, err })); + expect(collectionA.success).to.be.true; expect(collectionA.data).to.be.deep.equal({ "Items": [ diff --git a/test/ts_connected.crud.spec.ts b/test/ts_connected.crud.spec.ts index c3e73a54..d19d1069 100644 --- a/test/ts_connected.crud.spec.ts +++ b/test/ts_connected.crud.spec.ts @@ -4618,8 +4618,8 
@@ describe('terminal methods', () => { }) }); -describe('query limit', () => { - it('adding a limit should not cause dropped items when paginating', async () => { +describe('query size', () => { + it('adding a size should not cause dropped items when paginating', async () => { const entity = new Entity({ model: { version: '1', @@ -4665,19 +4665,20 @@ describe('query limit', () => { description: uuid(), } } - const limit = 10; + const count = 10; const itemCount = 100; + const items = new Array(itemCount) + .fill({}) + .map(createItem); - const items = new Array(itemCount).fill({}).map(createItem); await entity.put(items).go(); - let iterations = 0; let cursor: string | null = null; let results: EntityItem[] = []; do { const response: QueryResponse = await entity.query .records({accountId}) - .go({ cursor, limit }); + .go({ cursor, count }); results = results.concat(response.data); cursor = response.cursor; iterations++; diff --git a/www/src/pages/en/queries/pagination.mdx b/www/src/pages/en/queries/pagination.mdx index b0d54f5b..76e61c46 100644 --- a/www/src/pages/en/queries/pagination.mdx +++ b/www/src/pages/en/queries/pagination.mdx @@ -22,7 +22,11 @@ All ElectroDB `query` and `scan` operations return a `cursor`, which is a string The terminal method `go()` accepts a `cursor` when executing a `query` or `scan` to continue paginating for more results. Pass the cursor from the previous query to your next query and ElectroDB will continue its pagination where it left off. -> To limit the number of items ElectroDB will retrieve, read more about the [Query Options](/en/core-concepts/execution-queries) `pages` and `limit`. +> To limit the number of items ElectroDB will retrieve, read more about the [Execution Options](/en/core-concepts/execution-queries) `pages` and `limit`. + +## Limit + +The execution option `limit` allows you to specify a number of items you'd like to receive. 
If the number of items returned in a single request to DynamoDB is below the `limit` provided, ElectroDB will continue to paginate until the limit is reached. When using `limit` with `.params()` the DynamoDB `Limit` parameter will be applied. ### Entities