From 763c0e6ad50f15677b5f535999f93f122c84b583 Mon Sep 17 00:00:00 2001 From: Bob Evans Date: Thu, 2 Jan 2025 11:21:21 -0500 Subject: [PATCH] feat: Added instrumentation for `@opensearch-project/opensearch` v2.1.0+ (#2850) --- docker-compose.yml | 22 + .../@opensearch-project/opensearch.js | 161 ++++++ lib/instrumentations.js | 1 + lib/shim/datastore-shim.js | 1 + test/lib/params.js | 2 + test/unit/instrumentation/opensearch.test.js | 154 ++++++ test/versioned/opensearch/newrelic.js | 21 + test/versioned/opensearch/opensearch.test.js | 475 ++++++++++++++++++ test/versioned/opensearch/package.json | 22 + 9 files changed, 859 insertions(+) create mode 100644 lib/instrumentation/@opensearch-project/opensearch.js create mode 100644 test/unit/instrumentation/opensearch.test.js create mode 100644 test/versioned/opensearch/newrelic.js create mode 100644 test/versioned/opensearch/opensearch.test.js create mode 100644 test/versioned/opensearch/package.json diff --git a/docker-compose.yml b/docker-compose.yml index 463b986c72..ddbbbd62bf 100755 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -21,6 +21,28 @@ services: interval: 30s timeout: 10s retries: 5 + + opensearch: + container_name: nr_node_opensearch + image: opensearchproject/opensearch:2.1.0 + environment: + - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + # Disable password + - "DISABLE_SECURITY_PLUGIN=true" + # Set cluster to single node + - "discovery.type=single-node" + # Disable high watermarks, used in CI as the runner is constrained on disk space + - "cluster.routing.allocation.disk.threshold_enabled=false" + - "network.host=_site_" + - "transport.host=127.0.0.1" + - "http.host=0.0.0.0" + ports: + - "9201:9200" + healthcheck: + test: ["CMD", "curl", "-f", "http://127.0.0.1:9200"] + interval: 30s + timeout: 10s + retries: 5 # Kafka setup based on the e2e tests in node-rdkafka. Needs both the # `zookeeper` and `kafka` services. 
diff --git a/lib/instrumentation/@opensearch-project/opensearch.js b/lib/instrumentation/@opensearch-project/opensearch.js new file mode 100644 index 0000000000..b1593114de --- /dev/null +++ b/lib/instrumentation/@opensearch-project/opensearch.js @@ -0,0 +1,161 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' + +const { QuerySpec } = require('../../shim/specs') +const semver = require('semver') +const logger = require('../../logger').child({ component: 'OpenSearch' }) +const { isNotEmpty } = require('../../util/objects') + +/** + * Instruments the `@opensearch-project/opensearch` module. This function is + * passed to `onRequire` when instantiating instrumentation. + * + * @param {object} _agent New Relic agent + * @param {object} opensearch resolved module + * @param {string} _moduleName string representation of require/import path + * @param {object} shim New Relic shim + * @returns {void} + */ +module.exports = function initialize(_agent, opensearch, _moduleName, shim) { + const pkgVersion = shim.pkgVersion + if (semver.lt(pkgVersion, '2.1.0')) { + shim && + shim.logger.debug( + `Opensearch support is for versions 2.1.0 and above. Not instrumenting ${pkgVersion}.` + ) + return + } + + shim.setDatastore(shim.OPENSEARCH) + shim.setParser(queryParser) + + shim.recordQuery( + opensearch.Transport.prototype, + 'request', + function wrapQuery(shim, _, __, args) { + const ctx = this + return new QuerySpec({ + query: JSON.stringify(args?.[0]), + promise: true, + opaque: true, + inContext: function inContext() { + getConnection.call(ctx, shim) + } + }) + } + ) +} + +/** + * Parses the parameters sent to opensearch for collection, + * method, and query + * + * @param {object} params Query object received by the datashim. + * Required properties: path {string}, method {string}. 
+ * Optional properties: querystring {string}, body {object}, and + * bulkBody {object} + * @returns {object} consisting of collection {string}, operation {string}, + * and query {string} + */ +function queryParser(params) { + params = JSON.parse(params) + const { collection, operation } = parsePath(params.path, params.method) + + // the substance of the query may be in querystring or in body. + let queryParam = {} + if (isNotEmpty(params.querystring)) { + queryParam = params.querystring + } + // let body or bulkBody override querystring, as some requests have both + if (isNotEmpty(params.body)) { + queryParam = params.body + } else if (Array.isArray(params.bulkBody) && params.bulkBody.length) { + queryParam = params.bulkBody + } + // The helper interface provides a simpler API: + + const query = JSON.stringify(queryParam) + + return { + collection, + operation, + query + } +} + +/** + * Convenience function for parsing the params.path sent to the queryParser + * for normalized collection and operation + * + * @param {string} pathString params.path supplied to the query parser + * @param {string} method http method called by @opensearch-project/opensearch + * @returns {object} consisting of collection {string} and operation {string} + */ +function parsePath(pathString, method) { + let collection + let operation + const defaultCollection = 'any' + const actions = { + GET: 'get', + PUT: 'create', + POST: 'create', + DELETE: 'delete', + HEAD: 'exists' + } + const suffix = actions[method] + + try { + const path = pathString.split('/') + if (method === 'PUT' && path.length === 2) { + collection = path?.[1] || defaultCollection + operation = `index.create` + return { collection, operation } + } + path.forEach((segment, idx) => { + const prev = idx - 1 + let opname + if (segment === '_search') { + collection = path?.[prev] || defaultCollection + operation = `search` + } else if (segment[0] === '_') { + opname = segment.substring(1) + collection = path?.[prev] || 
defaultCollection + operation = `${opname}.${suffix}` + } + }) + if (!operation && !collection) { + // likely creating an index--no underscore segments + collection = path?.[1] || defaultCollection + operation = `index.${suffix}` + } + } catch (e) { + logger.warn('Failed to parse path for operation and collection. Using defaults') + logger.warn(e) + collection = defaultCollection + operation = 'unknown' + } + + return { collection, operation } +} + +/** + * Convenience function for deriving connection information from + * opensearch + * + * @param {object} shim The New Relic datastore-shim + * @returns {Function} captureInstanceAttributes method of shim + */ +function getConnection(shim) { + const connectionPool = this.connectionPool.connections[0] + const host = connectionPool.url.host.split(':') + const port = connectionPool.url.port || host?.[1] + return shim.captureInstanceAttributes(host[0], port) +} + +module.exports.queryParser = queryParser +module.exports.parsePath = parsePath +module.exports.getConnection = getConnection diff --git a/lib/instrumentations.js b/lib/instrumentations.js index 6e1be074c5..47dbfb0aea 100644 --- a/lib/instrumentations.js +++ b/lib/instrumentations.js @@ -11,6 +11,7 @@ const InstrumentationDescriptor = require('./instrumentation-descriptor') module.exports = function instrumentations() { return { '@elastic/elasticsearch': { type: InstrumentationDescriptor.TYPE_DATASTORE }, + '@opensearch-project/opensearch': { type: InstrumentationDescriptor.TYPE_DATASTORE }, '@grpc/grpc-js': { module: './instrumentation/grpc-js' }, '@hapi/hapi': { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK }, '@hapi/vision': { type: InstrumentationDescriptor.TYPE_WEB_FRAMEWORK }, diff --git a/lib/shim/datastore-shim.js b/lib/shim/datastore-shim.js index 70d103ee0e..b883e17390 100644 --- a/lib/shim/datastore-shim.js +++ b/lib/shim/datastore-shim.js @@ -38,6 +38,7 @@ const DATASTORE_NAMES = { MONGODB: 'MongoDB', MYSQL: 'MySQL', NEPTUNE: 'Neptune', + 
OPENSEARCH: 'OpenSearch', POSTGRES: 'Postgres', REDIS: 'Redis', PRISMA: 'Prisma' diff --git a/test/lib/params.js b/test/lib/params.js index 03095d0031..785ad72b2b 100644 --- a/test/lib/params.js +++ b/test/lib/params.js @@ -28,6 +28,8 @@ module.exports = { elastic_host: process.env.NR_NODE_TEST_ELASTIC_HOST || 'localhost', elastic_port: process.env.NR_NODE_TEST_ELASTIC_PORT || 9200, + opensearch_host: process.env.NR_NODE_TEST_OPENSEARCH_HOST || 'localhost', + opensearch_port: process.env.NR_NODE_TEST_OPENSEARCH_PORT || 9201, postgres_host: process.env.NR_NODE_TEST_POSTGRES_HOST || 'localhost', postgres_port: process.env.NR_NODE_TEST_POSTGRES_PORT || 5432, diff --git a/test/unit/instrumentation/opensearch.test.js b/test/unit/instrumentation/opensearch.test.js new file mode 100644 index 0000000000..3589385a26 --- /dev/null +++ b/test/unit/instrumentation/opensearch.test.js @@ -0,0 +1,154 @@ +/* + * Copyright 2023 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' + +const test = require('node:test') +const assert = require('node:assert') +const { + parsePath, + queryParser +} = require('../../../lib/instrumentation/@opensearch-project/opensearch') +const instrumentation = require('../../../lib/instrumentation/@opensearch-project/opensearch') +const methods = [ + { name: 'GET', expected: 'get' }, + { name: 'PUT', expected: 'create' }, + { name: 'POST', expected: 'create' }, + { name: 'DELETE', expected: 'delete' }, + { name: 'HEAD', expected: 'exists' } +] + +test('should log warning if version is not supported', async () => { + const shim = { + pkgVersion: '2.0.0', + logger: { + debug(msg) { + assert.equal( + msg, + 'Opensearch support is for versions 2.1.0 and above. Not instrumenting 2.0.0.' 
+ ) + } + } + } + instrumentation({}, {}, '@opensearch-project/opensearch', shim) +}) +test('parsePath should behave as expected', async (t) => { + await t.test('indices', async function () { + const path = '/indexName' + methods.forEach((m) => { + const { collection, operation } = parsePath(path, m.name) + const expectedOp = `index.${m.expected}` + assert.equal(collection, 'indexName', `index should be 'indexName'`) + assert.equal(operation, expectedOp, 'operation should include index and method') + }) + }) + await t.test('search of one index', async function () { + const path = '/indexName/_search' + methods.forEach((m) => { + const { collection, operation } = parsePath(path, m.name) + const expectedOp = `search` + assert.equal(collection, 'indexName', `index should be 'indexName'`) + assert.equal(operation, expectedOp, `operation should be 'search'`) + }) + }) + await t.test('search of all indices', async function () { + const path = '/_search/' + methods.forEach((m) => { + if (m.name === 'PUT') { + // skip PUT + return + } + const { collection, operation } = parsePath(path, m.name) + const expectedOp = `search` + assert.equal(collection, 'any', 'index should be `any`') + assert.equal(operation, expectedOp, `operation should match ${expectedOp}`) + }) + }) + await t.test('doc', async function () { + const path = '/indexName/_doc/testKey' + methods.forEach((m) => { + const { collection, operation } = parsePath(path, m.name) + const expectedOp = `doc.${m.expected}` + assert.equal(collection, 'indexName', `index should be 'indexName'`) + assert.equal(operation, expectedOp, `operation should match ${expectedOp}`) + }) + }) + await t.test('path is /', async function () { + const path = '/' + methods.forEach((m) => { + const { collection, operation } = parsePath(path, m.name) + const expectedOp = `index.${m.expected}` + assert.equal(collection, 'any', 'index should be `any`') + assert.equal(operation, expectedOp, `operation should match ${expectedOp}`) + }) + }) + 
await t.test( + 'should provide sensible defaults when path is {} and parser encounters an error', + function () { + const path = {} + methods.forEach((m) => { + const { collection, operation } = parsePath(path, m.name) + const expectedOp = `unknown` + assert.equal(collection, 'any', 'index should be `any`') + assert.equal(operation, expectedOp, `operation should match '${expectedOp}'`) + }) + } + ) +}) + +test('queryParser should behave as expected', async (t) => { + await t.test('given a querystring, it should use that for query', () => { + const params = JSON.stringify({ + path: '/_search', + method: 'GET', + querystring: { q: 'searchterm' } + }) + const expected = { + collection: 'any', + operation: 'search', + query: JSON.stringify({ q: 'searchterm' }) + } + const parseParams = queryParser(params) + assert.deepEqual(parseParams, expected, 'queryParser should handle query strings') + }) + await t.test('given a body, it should use that for query', () => { + const params = JSON.stringify({ + path: '/_search', + method: 'POST', + body: { match: { body: 'document' } } + }) + const expected = { + collection: 'any', + operation: 'search', + query: JSON.stringify({ match: { body: 'document' } }) + } + const parseParams = queryParser(params) + assert.deepEqual(parseParams, expected, 'queryParser should handle query body') + }) + await t.test('given a bulkBody, it should use that for query', () => { + const params = JSON.stringify({ + path: '/_msearch', + method: 'POST', + bulkBody: [ + {}, // cross-index searches can have an empty metadata section + { query: { match: { body: 'sixth' } } }, + {}, + { query: { match: { body: 'bulk' } } } + ] + }) + const expected = { + collection: 'any', + operation: 'msearch.create', + query: JSON.stringify([ + {}, // cross-index searches can have an empty metadata section + { query: { match: { body: 'sixth' } } }, + {}, + { query: { match: { body: 'bulk' } } } + ]) + } + const parseParams = queryParser(params) + 
assert.deepEqual(parseParams, expected, 'queryParser should handle query body') + }) +}) diff --git a/test/versioned/opensearch/newrelic.js b/test/versioned/opensearch/newrelic.js new file mode 100644 index 0000000000..e98a8afddb --- /dev/null +++ b/test/versioned/opensearch/newrelic.js @@ -0,0 +1,21 @@ +/* + * Copyright 2020 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' + +exports.config = { + app_name: ['opensearch test'], + license_key: 'license key here', + utilization: { + detect_aws: false, + detect_pcf: false, + detect_azure: false, + detect_gcp: false, + detect_docker: false + }, + logging: { + enabled: true + } +} diff --git a/test/versioned/opensearch/opensearch.test.js b/test/versioned/opensearch/opensearch.test.js new file mode 100644 index 0000000000..1e32278ca9 --- /dev/null +++ b/test/versioned/opensearch/opensearch.test.js @@ -0,0 +1,475 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' +const test = require('node:test') +const assert = require('node:assert') +const helper = require('../../lib/agent_helper') +const params = require('../../lib/params') +const urltils = require('../../../lib/util/urltils') +const crypto = require('crypto') +const DB_INDEX = `test-${randomString()}` +const DB_INDEX_2 = `test2-${randomString()}` +const SEARCHTERM_1 = randomString() + +function randomString() { + return crypto.randomBytes(5).toString('hex') +} + +function setRequestBody(body) { + return { body } +} +function setBulkBody(body) { + return { + refresh: true, + body + } +} +function setMsearch(body) { + return { body } +} + +test('opensearch instrumentation', async (t) => { + t.beforeEach(async (ctx) => { + const agent = helper.instrumentMockedAgent() + + const METRIC_HOST_NAME = urltils.isLocalhost(params.opensearch_host) + ? 
agent.config.getHostnameSafe() + : params.opensearch_host + const HOST_ID = METRIC_HOST_NAME + '/' + params.opensearch_port + + // need to capture attributes + agent.config.attributes.enabled = true + + const { Client } = require('@opensearch-project/opensearch') + const client = new Client({ + node: `http://${params.opensearch_host}:${params.opensearch_port}` + }) + + ctx.nr = { + agent, + client, + METRIC_HOST_NAME, + HOST_ID + } + + return Promise.all([ + client.indices.create({ index: DB_INDEX }), + client.indices.create({ index: DB_INDEX_2 }) + ]) + }) + + t.afterEach((ctx) => { + const { agent, client } = ctx.nr + helper.unloadAgent(agent) + return Promise.all([ + client.indices.delete({ index: DB_INDEX }), + client.indices.delete({ index: DB_INDEX_2 }) + ]) + }) + + await t.test('should be able to record creating an index', async (t) => { + const { agent, client } = t.nr + const index = `test-index-${randomString()}` + t.after(async () => { + await client.indices.delete({ index }) + }) + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + assert.ok(transaction, 'transaction should be visible') + await client.indices.create({ index }) + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + firstChild.name, + `Datastore/statement/OpenSearch/${index}/index.create`, + 'should record index PUT as create' + ) + }) + }) + + await t.test('should record bulk operations', async (t) => { + const { agent, client } = t.nr + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + await bulkInsert({ client }) + assert.ok(transaction, 'transaction should still be visible after bulk create') + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + 
firstChild.name, + 'Datastore/statement/OpenSearch/any/bulk.create', + 'should record bulk operation' + ) + }) + }) + + await t.test('should record bulk operations triggered by client helpers', async (t) => { + const { agent, client } = t.nr + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const operations = getBulkData() + await client.helpers.bulk({ + datasource: operations, + onDocument() { + return { + index: { _index: DB_INDEX_2 } + } + }, + refreshOnCompletion: true + }) + assert.ok(transaction, 'transaction should still be visible after bulk create') + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + assert.ok(trace?.root?.children?.[1], 'trace, trace root, and second child should exist') + // helper interface results in a first child of timers.setTimeout, with the second child related to the operation + const secondChild = trace.root.children[1] + assert.equal( + secondChild.name, + 'Datastore/statement/OpenSearch/any/bulk.create', + 'should record bulk operation' + ) + }) + }) + + await t.test('should record search with query string', async function (t) { + const { agent, client, METRIC_HOST_NAME } = t.nr + // enable slow queries + agent.config.transaction_tracer.explain_threshold = 0 + agent.config.transaction_tracer.record_sql = 'raw' + agent.config.slow_sql.enabled = true + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const expectedQuery = { q: SEARCHTERM_1 } + const search = await client.search({ index: DB_INDEX_2, ...expectedQuery }) + assert.ok(search, 'search should return a result') + assert.ok(transaction, 'transaction should still be visible after search') + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + firstChild.name, + 
`Datastore/statement/OpenSearch/${DB_INDEX_2}/search`, + 'querystring search should be recorded as a search' + ) + const attrs = firstChild.getAttributes() + assert.equal(attrs.product, 'OpenSearch') + assert.equal(attrs.host, METRIC_HOST_NAME) + transaction.end() + assert.ok(agent.queries.samples.size > 0, 'there should be a query sample') + for (const query of agent.queries.samples.values()) { + assert.ok(query.total > 0, 'the samples should have positive duration') + assert.equal( + query.trace.query, + JSON.stringify(expectedQuery), + 'expected query string should have been used' + ) + } + }) + }) + await t.test('should record search with request body', async function (t) { + const { agent, client, METRIC_HOST_NAME } = t.nr + // enable slow queries + agent.config.transaction_tracer.explain_threshold = 0 + agent.config.transaction_tracer.record_sql = 'raw' + agent.config.slow_sql.enabled = true + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + // We expect this content in the trace of the request, but the request body is different in 7 v 8. 
+ const expectedQuery = { query: { match: { body: 'document' } } } + const requestBody = setRequestBody(expectedQuery) + const search = await client.search({ index: DB_INDEX, ...requestBody }) + assert.ok(search, 'search should return a result') + assert.ok(transaction, 'transaction should still be visible after search') + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + firstChild.name, + `Datastore/statement/OpenSearch/${DB_INDEX}/search`, + 'search index is specified, so name shows it' + ) + const attrs = firstChild.getAttributes() + assert.equal(attrs.product, 'OpenSearch') + assert.equal(attrs.host, METRIC_HOST_NAME) + assert.equal(attrs.port_path_or_id, `${params.opensearch_port}`) + // TODO: update once instrumentation is properly setting database name + assert.equal(attrs.database_name, 'unknown') + transaction.end() + assert.ok(agent.queries.samples.size > 0, 'there should be a query sample') + for (const query of agent.queries.samples.values()) { + assert.ok(query.total > 0, 'the samples should have positive duration') + assert.equal( + query.trace.query, + JSON.stringify({ ...expectedQuery }), + 'expected query body should have been recorded' + ) + } + }) + }) + + await t.test('should record search across indices', async function (t) { + const { agent, client, METRIC_HOST_NAME } = t.nr + // enable slow queries + agent.config.transaction_tracer.explain_threshold = 0 + agent.config.transaction_tracer.record_sql = 'raw' + agent.config.slow_sql.enabled = true + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const expectedQuery = { query: { match: { body: 'document' } } } + const requestBody = setRequestBody(expectedQuery) + const search = await client.search({ ...requestBody }) + assert.ok(search, 'search should return a result') + assert.ok(transaction, 'transaction should still be 
visible after search') + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + firstChild.name, + 'Datastore/statement/OpenSearch/any/search', + 'child name on all indices should show search' + ) + const attrs = firstChild.getAttributes() + assert.equal(attrs.product, 'OpenSearch') + assert.equal(attrs.host, METRIC_HOST_NAME) + transaction.end() + assert.ok(agent.queries.samples.size > 0, 'there should be a query sample') + for (const query of agent.queries.samples.values()) { + assert.ok(query.total > 0, 'the samples should have positive duration') + assert.equal( + query.trace.query, + JSON.stringify({ ...expectedQuery }), + 'expected query body should have been recorded' + ) + } + }) + }) + await t.test('should record msearch', async function (t) { + const { agent, client, METRIC_HOST_NAME } = t.nr + agent.config.transaction_tracer.explain_threshold = 0 + agent.config.transaction_tracer.record_sql = 'raw' + agent.config.slow_sql.enabled = true + await bulkInsert({ client }) + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const expectedQuery = [ + {}, // cross-index searches can have an empty metadata section + { query: { match: { body: SEARCHTERM_1 } } }, + {}, + { query: { match: { body: 'bulk' } } } + ] + const requestBody = setMsearch(expectedQuery) + const search = await client.msearch(requestBody) + const results = search?.body?.responses + + assert.ok(results, 'msearch should return results') + assert.equal(results?.length, 2, 'there should be two responses--one per search') + assert.equal(results?.[0]?.hits?.hits?.length, 1, 'first search should return one result') + assert.equal(results?.[1]?.hits?.hits?.length, 8, 'second search should return eight results') + assert.ok(transaction, 'transaction should still be visible after search') + const trace = transaction.trace + 
assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + firstChild.name, + 'Datastore/statement/OpenSearch/any/msearch.create', + 'child name should show msearch' + ) + const attrs = firstChild.getAttributes() + assert.equal(attrs.product, 'OpenSearch') + assert.equal(attrs.host, METRIC_HOST_NAME) + transaction.end() + assert.ok(agent.queries.samples.size > 0, 'there should be a query sample') + for (const query of agent.queries.samples.values()) { + assert.ok(query.total > 0, 'the samples should have positive duration') + assert.equal( + query.trace.query, + JSON.stringify(expectedQuery), + 'expected msearch query should have been recorded' + ) + } + }) + }) + + await t.test('should record msearch via helpers', async function (t) { + const { agent, client } = t.nr + agent.config.transaction_tracer.explain_threshold = 0 + agent.config.transaction_tracer.record_sql = 'raw' + agent.config.slow_sql.enabled = true + await bulkInsert({ client }) + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const m = client.helpers.msearch() + const searchA = await m.search({}, { query: { match: { body: SEARCHTERM_1 } } }) + const searchB = await m.search({}, { query: { match: { body: 'bulk' } } }) + const resultsA = searchA?.body?.hits + const resultsB = searchB?.body?.hits + + assert.ok(resultsA, 'msearch for sixth should return results') + assert.ok(resultsB, 'msearch for bulk should return results') + assert.equal(resultsA?.hits?.length, 1, 'first search should return one result') + assert.equal(resultsB?.hits?.length, 8, 'second search should return eight results') + assert.ok(transaction, 'transaction should still be visible after search') + const trace = transaction.trace + assert.ok(trace?.root?.children?.[0], 'trace, trace root, and first child should exist') + const firstChild = trace.root.children[0] + assert.equal( + firstChild.name, + 
'timers.setTimeout', + 'helpers, for some reason, generates a setTimeout metric first' + ) + transaction.end() + assert.ok(agent.queries.samples.size > 0, 'there should be a query sample') + for (const query of agent.queries.samples.values()) { + // which query gets captured in helper.msearch is non-deterministic + assert.ok(query.total > 0, 'the samples should have positive duration') + } + }) + }) + + await t.test('should create correct metrics', async function (t) { + const { agent, client, HOST_ID } = t.nr + const id = `key-${randomString()}` + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const documentProp = setRequestBody({ + document: { + title: 'second document', + body: 'body of the second document' + } + }) + await client.index({ + index: DB_INDEX, + id, + ...documentProp + }) + + // check metrics/methods for "exists" queries + await client.exists({ id, index: DB_INDEX }) + await client.get({ id, index: DB_INDEX }) + const searchQuery = setRequestBody({ query: { match: { body: 'document' } } }) + await client.search(searchQuery) + await client.delete({ id, index: DB_INDEX }) + transaction.end() + + const unscoped = transaction.metrics.unscoped + const expected = { + 'Datastore/all': 5, + 'Datastore/allWeb': 5, + 'Datastore/OpenSearch/all': 5, + 'Datastore/OpenSearch/allWeb': 5, + 'Datastore/operation/OpenSearch/doc.create': 1, + 'Datastore/operation/OpenSearch/doc.get': 1, + 'Datastore/operation/OpenSearch/doc.exists': 1, + 'Datastore/operation/OpenSearch/search': 1, + [`Datastore/statement/OpenSearch/${DB_INDEX}/doc.create`]: 1, + [`Datastore/statement/OpenSearch/${DB_INDEX}/doc.get`]: 1, + [`Datastore/statement/OpenSearch/${DB_INDEX}/doc.exists`]: 1, + [`Datastore/statement/OpenSearch/${DB_INDEX}/doc.delete`]: 1, + 'Datastore/statement/OpenSearch/any/search': 1 + } + expected['Datastore/instance/OpenSearch/' + HOST_ID] = 5 + checkMetrics(unscoped, expected) + }) + }) + + await t.test('should not add instance 
attributes/metrics when disabled', async function (t) { + const { agent, client, HOST_ID } = t.nr + + // disable + agent.config.datastore_tracer.instance_reporting.enabled = false + agent.config.datastore_tracer.database_name_reporting.enabled = false + + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + const documentProp = setRequestBody({ + document: { + title: 'third document title', + body: 'body of the third document' + } + }) + + await client.index({ + index: DB_INDEX, + id: 'testkey3', + ...documentProp + }) + + const createSegment = transaction.trace.root.children[0] + const attributes = createSegment.getAttributes() + assert.equal(attributes.host, undefined, 'should not have host attribute') + assert.equal(attributes.port_path_or_id, undefined, 'should not have port attribute') + assert.equal(attributes.database_name, undefined, 'should not have db name attribute') + + transaction.end() + const unscoped = transaction.metrics.unscoped + assert.equal( + unscoped['Datastore/instance/OpenSearch/' + HOST_ID], + undefined, + 'should not have instance metric' + ) + }) + }) + await t.test('edge cases', async (t) => { + const { agent, client } = t.nr + await helper.runInTransaction(agent, async function transactionInScope(transaction) { + try { + await client.indices.create({ index: '_search' }) + } catch (e) { + assert.ok(e, 'should not be able to create an index named _search') + } + const firstChild = transaction?.trace?.root?.children[0] + assert.equal( + firstChild.name, + 'Datastore/statement/OpenSearch/_search/index.create', + 'should record the attempted index creation without altering the index name' + ) + }) + }) + await t.test('index existence check should not error', async (t) => { + const { agent, client } = t.nr + await helper.runInTransaction(agent, async function transactionInScope() { + try { + await client.indices.exists({ index: DB_INDEX }) + } catch (e) { + assert.ok(!e, 'should be able to check for index 
existence') + } + }) + }) +}) + +function getBulkData(includeIndex) { + let operations = [ + { title: 'First Bulk Doc', body: 'Content of first bulk document' }, + { title: 'Second Bulk Doc', body: 'Content of second bulk document.' }, + { title: 'Third Bulk Doc', body: 'Content of third bulk document.' }, + { title: 'Fourth Bulk Doc', body: 'Content of fourth bulk document.' }, + { title: 'Fifth Bulk Doc', body: 'Content of fifth bulk document' }, + { + title: 'Sixth Bulk Doc', + body: `Content of sixth bulk document. Has search term: ${SEARCHTERM_1}` + }, + { title: 'Seventh Bulk Doc', body: 'Content of seventh bulk document.' }, + { title: 'Eighth Bulk Doc', body: 'Content of eighth bulk document.' } + ] + + if (includeIndex) { + operations = operations.flatMap((doc, i) => { + return [{ index: { _index: i < 4 ? DB_INDEX : DB_INDEX_2 } }, doc] + }) + } + + return operations +} + +async function bulkInsert({ client }) { + const operations = getBulkData(true) + await client.bulk(setBulkBody(operations)) +} + +function checkMetrics(metrics, expected) { + Object.keys(expected).forEach(function (name) { + assert.ok(metrics[name], 'should have metric ' + name) + if (metrics[name]) { + assert.equal( + metrics[name].callCount, + expected[name], + 'should have ' + expected[name] + ' calls for ' + name + ) + } + }) +} diff --git a/test/versioned/opensearch/package.json b/test/versioned/opensearch/package.json new file mode 100644 index 0000000000..b43662039c --- /dev/null +++ b/test/versioned/opensearch/package.json @@ -0,0 +1,22 @@ +{ + "name": "opensearch-tests", + "targets": [{"name":"@opensearch-project/opensearch","minAgentVersion":"12.10.0"}], + "version": "0.0.0", + "private": true, + "engines": { + "node": ">=18" + }, + "tests": [ + { + "engines": { + "node": ">=18" + }, + "dependencies": { + "@opensearch-project/opensearch": ">=2.1.0" + }, + "files": [ + "opensearch.test.js" + ] + } + ] +}