From c01d72445c838e5ffd1db0990f79e6c8bcb1bc91 Mon Sep 17 00:00:00 2001 From: Bob Evans Date: Fri, 6 Dec 2024 14:59:38 -0500 Subject: [PATCH] feat: Added segment synthesis for db client otel spans to db trace (#2820) --- lib/db/query-parsers/elasticsearch.js | 102 ++++++++++++++ lib/db/query-parsers/mongodb.js | 29 ++++ lib/instrumentation/@elastic/elasticsearch.js | 97 +------------- lib/instrumentation/mongodb/v4-mongo.js | 22 +-- lib/otel/rules.json | 4 + lib/otel/segment-synthesis.js | 66 ++++++++- .../query-parsers}/elasticsearch.test.js | 2 +- test/unit/lib/otel/fixtures/db-sql.js | 61 +++++++++ test/unit/lib/otel/fixtures/http-client.js | 16 +++ test/unit/lib/otel/fixtures/index.js | 25 ++++ test/unit/lib/otel/fixtures/span.js | 17 +++ .../unit/lib/otel/segment-synthesizer.test.js | 125 +++++++++++++----- 12 files changed, 413 insertions(+), 153 deletions(-) create mode 100644 lib/db/query-parsers/elasticsearch.js create mode 100644 lib/db/query-parsers/mongodb.js rename test/unit/{instrumentation => db/query-parsers}/elasticsearch.test.js (97%) create mode 100644 test/unit/lib/otel/fixtures/db-sql.js create mode 100644 test/unit/lib/otel/fixtures/http-client.js create mode 100644 test/unit/lib/otel/fixtures/index.js create mode 100644 test/unit/lib/otel/fixtures/span.js diff --git a/lib/db/query-parsers/elasticsearch.js b/lib/db/query-parsers/elasticsearch.js new file mode 100644 index 0000000000..2a4427a9b6 --- /dev/null +++ b/lib/db/query-parsers/elasticsearch.js @@ -0,0 +1,102 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' +const logger = require('../../logger').child({ component: 'elasticsearch_query_parser' }) +const { isNotEmpty } = require('../../util/objects') + +/** + * Parses the parameters sent to elasticsearch for collection, + * method, and query + * + * @param {object} params Query object received by the datashim. + * Required properties: path {string}, method {string}. + * Optional properties: querystring {string}, body {object}, and + * bulkBody {object} + * @returns {object} consisting of collection {string}, operation {string}, + * and query {string} + */ +function queryParser(params) { + params = JSON.parse(params) + const { collection, operation } = parsePath(params.path, params.method) + + // the substance of the query may be in querystring or in body. 
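+  // A minimal illustrative example (assumed input, not part of this change): a search request like
+  //   queryParser('{"path":"/my-index/_search","method":"POST","body":{"query":{"match_all":{}}}}')
+  // should yield { collection: 'my-index', operation: 'search', query: '{"query":{"match_all":{}}}' },
+  // since the non-empty body takes precedence over an absent querystring.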
+  let queryParam = {}
+  if (isNotEmpty(params.querystring)) {
+    queryParam = params.querystring
+  }
+  // let body or bulkBody override querystring, as some requests have both
+  if (isNotEmpty(params.body)) {
+    queryParam = params.body
+  } else if (Array.isArray(params.bulkBody) && params.bulkBody.length) {
+    queryParam = params.bulkBody
+  }
+
+  const query = JSON.stringify(queryParam)
+
+  return {
+    collection,
+    operation,
+    query
+  }
+}
+
+/**
+ * Convenience function for parsing the params.path sent to the queryParser
+ * for normalized collection and operation
+ *
+ * @param {string} pathString params.path supplied to the query parser
+ * @param {string} method http method called by @elastic/elasticsearch
+ * @returns {object} consisting of collection {string} and operation {string}
+ */
+function parsePath(pathString, method) {
+  let collection
+  let operation
+  const defaultCollection = 'any'
+  const actions = {
+    GET: 'get',
+    PUT: 'create',
+    POST: 'create',
+    DELETE: 'delete',
+    HEAD: 'exists'
+  }
+  const suffix = actions[method]
+
+  try {
+    const path = pathString.split('/')
+    if (method === 'PUT' && path.length === 2) {
+      collection = path?.[1] || defaultCollection
+      operation = `index.create`
+      return { collection, operation }
+    }
+    path.forEach((segment, idx) => {
+      const prev = idx - 1
+      let opname
+      if (segment === '_search') {
+        collection = path?.[prev] || defaultCollection
+        operation = `search`
+      } else if (segment[0] === '_') {
+        opname = segment.substring(1)
+        collection = path?.[prev] || defaultCollection
+        operation = `${opname}.${suffix}`
+      }
+    })
+    if (!operation && !collection) {
+      // likely creating an index--no underscore segments
+      collection = path?.[1] || defaultCollection
+      operation = `index.${suffix}`
+    }
+  } catch (e) {
+    logger.warn('Failed to parse path for operation and collection. Using defaults')
+    logger.warn(e)
+    collection = defaultCollection
+    operation = 'unknown'
+  }
+
+  return { collection, operation }
+}
+
+module.exports = { queryParser, parsePath }
diff --git a/lib/db/query-parsers/mongodb.js b/lib/db/query-parsers/mongodb.js
new file mode 100644
index 0000000000..c07cbed872
--- /dev/null
+++ b/lib/db/query-parsers/mongodb.js
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 New Relic Corporation. All rights reserved.
+ * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' + +/** + * parser used to grab the collection and operation + * from a running query + * + * @param {object} operation mongodb operation + * @returns {object} { operation, collection } parsed operation and collection + */ +function queryParser(operation) { + let collection = this.collectionName || 'unknown' + + // cursor methods have collection on namespace.collection + if (this?.namespace?.collection) { + collection = this.namespace.collection + // (un)ordered bulk operations have collection on different key + } else if (this?.s?.collection?.collectionName) { + collection = this.s.collection.collectionName + } + + return { operation, collection } +} + +module.exports = queryParser diff --git a/lib/instrumentation/@elastic/elasticsearch.js b/lib/instrumentation/@elastic/elasticsearch.js index bc88e2a417..15fe7f1699 100644 --- a/lib/instrumentation/@elastic/elasticsearch.js +++ b/lib/instrumentation/@elastic/elasticsearch.js @@ -7,8 +7,7 @@ const { QuerySpec } = require('../../shim/specs') const semver = require('semver') -const logger = require('../../logger').child({ component: 'ElasticSearch' }) -const { isNotEmpty } = require('../../util/objects') +const { queryParser } = require('../../db/query-parsers/elasticsearch') /** * Instruments the `@elastic/elasticsearch` module. This function is @@ -46,98 +45,6 @@ module.exports = function initialize(_agent, elastic, _moduleName, shim) { }) } -/** - * Parses the parameters sent to elasticsearch for collection, - * method, and query - * - * @param {object} params Query object received by the datashim. - * Required properties: path {string}, method {string}. - * Optional properties: querystring {string}, body {object}, and - * bulkBody {object} - * @returns {object} consisting of collection {string}, operation {string}, - * and query {string} - */ -function queryParser(params) { - params = JSON.parse(params) - const { collection, operation } = parsePath(params.path, params.method) - - // the substance of the query may be in querystring or in body. 
- let queryParam = {} - if (isNotEmpty(params.querystring)) { - queryParam = params.querystring - } - // let body or bulkBody override querystring, as some requests have both - if (isNotEmpty(params.body)) { - queryParam = params.body - } else if (Array.isArray(params.bulkBody) && params.bulkBody.length) { - queryParam = params.bulkBody - } - // The helper interface provides a simpler API: - - const query = JSON.stringify(queryParam) - - return { - collection, - operation, - query - } -} - -/** - * Convenience function for parsing the params.path sent to the queryParser - * for normalized collection and operation - * - * @param {string} pathString params.path supplied to the query parser - * @param {string} method http method called by @elastic/elasticsearch - * @returns {object} consisting of collection {string} and operation {string} - */ -function parsePath(pathString, method) { - let collection - let operation - const defaultCollection = 'any' - const actions = { - GET: 'get', - PUT: 'create', - POST: 'create', - DELETE: 'delete', - HEAD: 'exists' - } - const suffix = actions[method] - - try { - const path = pathString.split('/') - if (method === 'PUT' && path.length === 2) { - collection = path?.[1] || defaultCollection - operation = `index.create` - return { collection, operation } - } - path.forEach((segment, idx) => { - const prev = idx - 1 - let opname - if (segment === '_search') { - collection = path?.[prev] || defaultCollection - operation = `search` - } else if (segment[0] === '_') { - opname = segment.substring(1) - collection = path?.[prev] || defaultCollection - operation = `${opname}.${suffix}` - } - }) - if (!operation && !collection) { - // likely creating an index--no underscore segments - collection = path?.[1] || defaultCollection - operation = `index.${suffix}` - } - } catch (e) { - logger.warn('Failed to parse path for operation and collection. 
Using defaults') - logger.warn(e) - collection = defaultCollection - operation = 'unknown' - } - - return { collection, operation } -} - /** * Convenience function for deriving connection information from * elasticsearch @@ -152,6 +59,4 @@ function getConnection(shim) { return shim.captureInstanceAttributes(host[0], port) } -module.exports.queryParser = queryParser -module.exports.parsePath = parsePath module.exports.getConnection = getConnection diff --git a/lib/instrumentation/mongodb/v4-mongo.js b/lib/instrumentation/mongodb/v4-mongo.js index a36c44fcde..0148674531 100644 --- a/lib/instrumentation/mongodb/v4-mongo.js +++ b/lib/instrumentation/mongodb/v4-mongo.js @@ -13,27 +13,7 @@ const { instrumentDb, parseAddress } = require('./common') - -/** - * parser used to grab the collection and operation - * from a running query - * - * @param {object} operation mongodb operation - * @returns {object} { operation, collection } parsed operation and collection - */ -function queryParser(operation) { - let collection = this.collectionName || 'unknown' - - // cursor methods have collection on namespace.collection - if (this?.namespace?.collection) { - collection = this.namespace.collection - // (un)ordered bulk operations have collection on different key - } else if (this?.s?.collection?.collectionName) { - collection = this.s.collection.collectionName - } - - return { operation, collection } -} +const queryParser = require('../../db/query-parsers/mongodb') /** * `commandStarted` handler used to diff --git a/lib/otel/rules.json b/lib/otel/rules.json index 37ba9ce2a6..3500109492 100644 --- a/lib/otel/rules.json +++ b/lib/otel/rules.json @@ -1,6 +1,7 @@ [ { "name": "OtelHttpServer1_23", + "type": "server", "matcher": { "required_metric_names": [ "http.server.request.duration" @@ -41,6 +42,7 @@ }, { "name": "OtelHttpServer1_20", + "type": "server", "matcher": { "required_metric_names": [ "http.server.duration" @@ -81,6 +83,7 @@ }, { "name": "OtelRpcServer1_20", + "type": "server", "matcher": { "required_metric_names": [ "rpc.server.duration" @@ -121,6 +124,7 @@ }, { "name": "FallbackServer", + "type": "server", "matcher": { "required_metric_names": [ "rpc.server.duration", diff --git a/lib/otel/segment-synthesis.js b/lib/otel/segment-synthesis.js index 7441c53b09..dddc89ff55 100644 --- a/lib/otel/segment-synthesis.js +++ b/lib/otel/segment-synthesis.js @@ -7,7 +7,16 @@ const { RulesEngine } = require('./rules') const defaultLogger = require('../logger').child({ component: 'segment-synthesizer' }) const NAMES = require('../metrics/names') -const { SEMATTRS_HTTP_HOST } = require('@opentelemetry/semantic-conventions') +const { + SEMATTRS_HTTP_HOST, + SEMATTRS_DB_MONGODB_COLLECTION, + SEMATTRS_DB_SYSTEM, + SEMATTRS_DB_SQL_TABLE, + SEMATTRS_DB_OPERATION, + SEMATTRS_DB_STATEMENT, + DbSystemValues +} = require('@opentelemetry/semantic-conventions') +const parseSql = require('../db/query-parsers/sql') class SegmentSynthesizer { constructor(agent, { logger = defaultLogger } = {}) { @@ -27,10 +36,13 @@ class SegmentSynthesizer { return } - if (rule?.type === 'external') { + if (rule.type === 'external') { return this.createExternalSegment(otelSpan) + } else if (rule.type === 'db') { + return this.createDatabaseSegment(otelSpan) } - this.logger.debug('Found type: %s, no synthesize rule currently built', rule.type) + + this.logger.debug('Found type: %s, no synthesis rule currently built', rule.type) } // TODO: should we move these to somewhere else and use in the places @@ -45,6 +57,54 @@ class 
SegmentSynthesizer { transaction: context.transaction }) } + + parseStatement(otelSpan, system) { + let table = otelSpan.attributes[SEMATTRS_DB_SQL_TABLE] + let operation = otelSpan.attributes[SEMATTRS_DB_OPERATION] + const statement = otelSpan.attributes[SEMATTRS_DB_STATEMENT] + if (statement && !(table || operation)) { + const parsed = parseSql({ sql: statement }) + if (parsed.operation && !operation) { + operation = parsed.operation + } + + if (parsed.collection && !table) { + table = parsed.collection + } + } + if (system === DbSystemValues.MONGODB) { + table = otelSpan.attributes[SEMATTRS_DB_MONGODB_COLLECTION] + } + + if (system === DbSystemValues.REDIS && statement) { + ;[operation] = statement.split(' ') + } + + table = table || 'Unknown' + operation = operation || 'Unknown' + + return { operation, table } + } + + // TODO: This probably has some holes + // I did analysis and tried to apply the best logic + // to extract table/operation + createDatabaseSegment(otelSpan) { + const context = this.agent.tracer.getContext() + const system = otelSpan.attributes[SEMATTRS_DB_SYSTEM] + const { operation, table } = this.parseStatement(otelSpan, system) + + let name = `Datastore/statement/${system}/${table}/${operation}` + // All segment name shapes are same except redis/memcached + if (system === DbSystemValues.REDIS || system === DbSystemValues.MEMCACHED) { + name = `Datastore/operation/${system}/${operation}` + } + return this.agent.tracer.createSegment({ + name, + parent: context.segment, + transaction: context.transaction + }) + } } module.exports = SegmentSynthesizer diff --git a/test/unit/instrumentation/elasticsearch.test.js b/test/unit/db/query-parsers/elasticsearch.test.js similarity index 97% rename from test/unit/instrumentation/elasticsearch.test.js rename to test/unit/db/query-parsers/elasticsearch.test.js index 02ed7d6bda..d91ac7a0d6 100644 --- a/test/unit/instrumentation/elasticsearch.test.js +++ b/test/unit/db/query-parsers/elasticsearch.test.js @@ -7,7 +7,7 @@ const test = require('node:test') const assert = require('node:assert') -const { parsePath, queryParser } = require('../../../lib/instrumentation/@elastic/elasticsearch') +const { parsePath, queryParser } = require('../../../../lib/db/query-parsers/elasticsearch') const methods = [ { name: 'GET', expected: 'get' }, { name: 'PUT', expected: 'create' }, diff --git a/test/unit/lib/otel/fixtures/db-sql.js b/test/unit/lib/otel/fixtures/db-sql.js new file mode 100644 index 0000000000..77a95de656 --- /dev/null +++ b/test/unit/lib/otel/fixtures/db-sql.js @@ -0,0 +1,61 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' +const { + SEMATTRS_DB_SYSTEM, + SEMATTRS_DB_SQL_TABLE, + SEMATTRS_DB_OPERATION, + SEMATTRS_DB_STATEMENT, + DbSystemValues, + SEMATTRS_DB_MONGODB_COLLECTION +} = require('@opentelemetry/semantic-conventions') +const { SpanKind } = require('@opentelemetry/api') +const createSpan = require('./span') + +function createDbClientSpan({ parentId, tracer, tx, name = 'test-span' }) { + const span = createSpan({ name, kind: SpanKind.CLIENT, parentId, tracer, tx }) + span.setAttribute(SEMATTRS_DB_SYSTEM, 'custom-db') + span.setAttribute(SEMATTRS_DB_SQL_TABLE, 'test-table') + span.setAttribute(SEMATTRS_DB_OPERATION, 'select') + return span +} + +function createDbStatementSpan({ parentId, tracer, tx, name = 'test-span' }) { + const span = createSpan({ name, kind: SpanKind.CLIENT, parentId, tracer, tx }) + span.setAttribute(SEMATTRS_DB_SYSTEM, 'custom-db') + span.setAttribute(SEMATTRS_DB_STATEMENT, 'select * from test-table') + return span +} + +function createMemcachedDbSpan({ parentId, tracer, tx, name = 'test-span' }) { + const span = createSpan({ name, kind: SpanKind.CLIENT, parentId, tracer, tx }) + span.setAttribute(SEMATTRS_DB_SYSTEM, DbSystemValues.MEMCACHED) + span.setAttribute(SEMATTRS_DB_OPERATION, 'set') + return span +} + +function createMongoDbSpan({ parentId, tracer, tx, name = 'test-span' }) { + const span = createSpan({ name, kind: SpanKind.CLIENT, parentId, tracer, tx }) + span.setAttribute(SEMATTRS_DB_SYSTEM, DbSystemValues.MONGODB) + span.setAttribute(SEMATTRS_DB_OPERATION, 'insert') + span.setAttribute(SEMATTRS_DB_MONGODB_COLLECTION, 'test-collection') + return span +} + +function createRedisDbSpan({ parentId, tracer, tx, name = 'test-span' }) { + const span = createSpan({ name, kind: SpanKind.CLIENT, parentId, tracer, tx }) + span.setAttribute(SEMATTRS_DB_SYSTEM, DbSystemValues.REDIS) + span.setAttribute(SEMATTRS_DB_STATEMENT, 'hset hash random random') + return span +} + +module.exports = { + createDbClientSpan, + createDbStatementSpan, + createMemcachedDbSpan, + createMongoDbSpan, + createRedisDbSpan +} diff --git a/test/unit/lib/otel/fixtures/http-client.js b/test/unit/lib/otel/fixtures/http-client.js new file mode 100644 index 0000000000..372b0debcc --- /dev/null +++ b/test/unit/lib/otel/fixtures/http-client.js @@ -0,0 +1,16 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' +const { SEMATTRS_HTTP_HOST, SEMATTRS_HTTP_METHOD } = require('@opentelemetry/semantic-conventions') +const { SpanKind } = require('@opentelemetry/api') +const createSpan = require('./span') + +module.exports = function createHttpClientSpan({ parentId, tracer, tx }) { + const span = createSpan({ name: 'test-span', kind: SpanKind.CLIENT, parentId, tracer, tx }) + span.setAttribute(SEMATTRS_HTTP_METHOD, 'GET') + span.setAttribute(SEMATTRS_HTTP_HOST, 'newrelic.com') + return span +} diff --git a/test/unit/lib/otel/fixtures/index.js b/test/unit/lib/otel/fixtures/index.js new file mode 100644 index 0000000000..22d7094a06 --- /dev/null +++ b/test/unit/lib/otel/fixtures/index.js @@ -0,0 +1,25 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. 
+ * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' +const { + createDbClientSpan, + createDbStatementSpan, + createMemcachedDbSpan, + createMongoDbSpan, + createRedisDbSpan +} = require('./db-sql') +const createSpan = require('./span') +const createHttpClientSpan = require('./http-client') + +module.exports = { + createDbClientSpan, + createDbStatementSpan, + createHttpClientSpan, + createMemcachedDbSpan, + createMongoDbSpan, + createRedisDbSpan, + createSpan +} diff --git a/test/unit/lib/otel/fixtures/span.js b/test/unit/lib/otel/fixtures/span.js new file mode 100644 index 0000000000..cfa8117dff --- /dev/null +++ b/test/unit/lib/otel/fixtures/span.js @@ -0,0 +1,17 @@ +/* + * Copyright 2024 New Relic Corporation. All rights reserved. + * SPDX-License-Identifier: Apache-2.0 + */ + +'use strict' +const { ROOT_CONTEXT, TraceFlags } = require('@opentelemetry/api') +const { Span } = require('@opentelemetry/sdk-trace-base') + +module.exports = function createSpan({ parentId, tracer, tx, kind, name }) { + const spanContext = { + traceId: tx.trace.id, + spanId: tx.trace.root.id, + traceFlags: TraceFlags.SAMPLED + } + return new Span(tracer, ROOT_CONTEXT, name, spanContext, kind, parentId) +} diff --git a/test/unit/lib/otel/segment-synthesizer.test.js b/test/unit/lib/otel/segment-synthesizer.test.js index edcf1c6eb9..c00af4b1e5 100644 --- a/test/unit/lib/otel/segment-synthesizer.test.js +++ b/test/unit/lib/otel/segment-synthesizer.test.js @@ -8,15 +8,20 @@ const test = require('node:test') const assert = require('node:assert') const helper = require('../../../lib/agent_helper') -const { ROOT_CONTEXT, SpanKind, TraceFlags } = require('@opentelemetry/api') -const { BasicTracerProvider, Span } = require('@opentelemetry/sdk-trace-base') +const { BasicTracerProvider } = require('@opentelemetry/sdk-trace-base') const SegmentSynthesizer = require('../../../../lib/otel/segment-synthesis') -const { - SEMATTRS_DB_SYSTEM, - SEMATTRS_HTTP_HOST, - SEMATTRS_HTTP_METHOD -} = require('@opentelemetry/semantic-conventions') const createMockLogger = require('../../mocks/logger') +const { + createDbClientSpan, + createSpan, + createHttpClientSpan, + createDbStatementSpan, + createMongoDbSpan, + createRedisDbSpan, + createMemcachedDbSpan +} = require('./fixtures') +const { SEMATTRS_HTTP_METHOD, SEMATTRS_DB_SYSTEM } = require('@opentelemetry/semantic-conventions') +const { SpanKind } = require('@opentelemetry/api') test.beforeEach((ctx) => { const loggerMock = createMockLogger() @@ -40,14 +45,7 @@ test.afterEach((ctx) => { test('should create http external segment from otel http client span', (t, end) => { const { agent, synthesizer, parentId, tracer } = t.nr helper.runInTransaction(agent, (tx) => { - const spanContext = { - traceId: tx.trace.id, - spanId: tx.trace.root.id, - traceFlags: TraceFlags.SAMPLED - } - const span = new Span(tracer, ROOT_CONTEXT, 'test-span', spanContext, SpanKind.CLIENT, parentId) - span.setAttribute(SEMATTRS_HTTP_METHOD, 'GET') - span.setAttribute(SEMATTRS_HTTP_HOST, 'newrelic.com') + const span = createHttpClientSpan({ tx, parentId, tracer }) const segment = synthesizer.synthesize(span) assert.equal(segment.name, 'External/newrelic.com') assert.equal(segment.parentId, tx.trace.root.id) @@ -56,22 +54,91 @@ test('should create http external segment from otel http client span', (t, end) }) }) -test('should log warning if a rule does have a synthesis for the given type', (t, end) => { +test('should create db segment', (t, end) => { + const { agent, synthesizer, parentId, tracer } = 
t.nr
+  helper.runInTransaction(agent, (tx) => {
+    const span = createDbClientSpan({ tx, parentId, tracer })
+    const segment = synthesizer.synthesize(span)
+    assert.equal(segment.name, 'Datastore/statement/custom-db/test-table/select')
+    assert.equal(segment.parentId, tx.trace.root.id)
+    tx.end()
+    end()
+  })
+})
+
+test('should create db segment and get operation and table from db.statement', (t, end) => {
+  const { agent, synthesizer, parentId, tracer } = t.nr
+  helper.runInTransaction(agent, (tx) => {
+    const span = createDbStatementSpan({ tx, parentId, tracer })
+    const segment = synthesizer.synthesize(span)
+    assert.equal(segment.name, 'Datastore/statement/custom-db/test-table/select')
+    assert.equal(segment.parentId, tx.trace.root.id)
+    tx.end()
+    end()
+  })
+})
+
+test('should create db segment and get collection from db.mongodb.collection', (t, end) => {
+  const { agent, synthesizer, parentId, tracer } = t.nr
+  helper.runInTransaction(agent, (tx) => {
+    const span = createMongoDbSpan({ tx, parentId, tracer })
+    const segment = synthesizer.synthesize(span)
+    assert.equal(segment.name, 'Datastore/statement/mongodb/test-collection/insert')
+    assert.equal(segment.parentId, tx.trace.root.id)
+    tx.end()
+    end()
+  })
+})
+
+test('should create db segment and get operation from db.statement when system is redis', (t, end) => {
+  const { agent, synthesizer, parentId, tracer } = t.nr
+  helper.runInTransaction(agent, (tx) => {
+    const span = createRedisDbSpan({ tx, parentId, tracer })
+    const segment = synthesizer.synthesize(span)
+    assert.equal(segment.name, 'Datastore/operation/redis/hset')
+    assert.equal(segment.parentId, tx.trace.root.id)
+    tx.end()
+    end()
+  })
+})
+
+test('should create db segment and get operation from db.operation when system is memcached', (t, end) => {
+  const { agent, synthesizer, parentId, tracer } = t.nr
+  helper.runInTransaction(agent, (tx) => {
+    const span = createMemcachedDbSpan({ tx, parentId, tracer })
+    const segment = synthesizer.synthesize(span)
+    assert.equal(segment.name, 'Datastore/operation/memcached/set')
+    assert.equal(segment.parentId, tx.trace.root.id)
+    tx.end()
+    end()
+  })
+})
+
+test('should log table and operation as unknown when db.sql.table and db.operation do not exist as span attributes', (t, end) => {
+  const { agent, synthesizer, parentId, tracer } = t.nr
+  helper.runInTransaction(agent, (tx) => {
+    const span = createSpan({ name: 'test-span', kind: SpanKind.CLIENT, parentId, tx, tracer })
+    span.setAttribute(SEMATTRS_DB_SYSTEM, 'test-db')
+
+    const segment = synthesizer.synthesize(span)
+    assert.equal(segment.name, 'Datastore/statement/test-db/Unknown/Unknown')
+    assert.equal(segment.parentId, tx.trace.root.id)
+    tx.end()
+    end()
+  })
+})
+
+test('should log warning when span does not have a synthesis rule', (t, end) => {
   const { agent, synthesizer, loggerMock, parentId, tracer } = t.nr
   helper.runInTransaction(agent, (tx) => {
-    const spanContext = {
-      traceId: tx.trace.id,
-      spanId: tx.trace.root.id,
-      traceFlags: TraceFlags.SAMPLED
-    }
-    const span = new Span(tracer, ROOT_CONTEXT, 'test-span', spanContext, SpanKind.CLIENT, parentId)
-    span.setAttribute(SEMATTRS_DB_SYSTEM, 'postgres')
+    const span = createSpan({ name: 'test-span', kind: SpanKind.SERVER, parentId, tx, tracer })
+    span.setAttribute(SEMATTRS_HTTP_METHOD, 'get')
     const segment = synthesizer.synthesize(span)
     assert.ok(!segment)
     assert.deepEqual(loggerMock.debug.args[0], [
-      'Found type: %s, no synthesize rule currently built',
-      'db'
+      'Found type: %s, no 
synthesis rule currently built', + 'server' ]) tx.end() end() @@ -82,13 +149,7 @@ test('should log warning span does not match a rule', (t, end) => { const { agent, synthesizer, loggerMock, parentId, tracer } = t.nr helper.runInTransaction(agent, (tx) => { - const spanContext = { - traceId: tx.trace.id, - spanId: tx.trace.root.id, - traceFlags: TraceFlags.SAMPLED - } - - const span = new Span(tracer, ROOT_CONTEXT, 'test-span', spanContext, 'bogus', parentId) + const span = createSpan({ name: 'test-span', kind: 'bogus', parentId, tx, tracer }) const segment = synthesizer.synthesize(span) assert.ok(!segment) assert.deepEqual(loggerMock.debug.args[0], [