diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/LRUCache.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/LRUCache.cs new file mode 100644 index 0000000000..9dc81d1446 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/LRUCache.cs @@ -0,0 +1,121 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Generic; +using System.Threading; + +namespace NewRelic.Agent.Extensions.Caching +{ + /// + /// A thread-safe LRU cache implementation. + /// + /// + /// + public class LRUCache + { + private readonly int _capacity; + private readonly Dictionary> _cacheMap; + private readonly LinkedList _lruList; + private readonly ReaderWriterLockSlim _lock = new(); + + public LRUCache(int capacity) + { + if (capacity <= 0) + { + throw new ArgumentException("Capacity must be greater than zero.", nameof(capacity)); + } + + _capacity = capacity; + _cacheMap = new Dictionary>(capacity); + _lruList = new LinkedList(); + } + + public TValue Get(TKey key) + { + _lock.EnterUpgradeableReadLock(); + try + { + if (_cacheMap.TryGetValue(key, out var node)) + { + // Move the accessed node to the front of the list + _lock.EnterWriteLock(); + try + { + _lruList.Remove(node); + _lruList.AddFirst(node); + } + finally + { + _lock.ExitWriteLock(); + } + return node.Value.Value; + } + throw new KeyNotFoundException("The given key was not present in the cache."); + } + finally + { + _lock.ExitUpgradeableReadLock(); + } + } + + public void Put(TKey key, TValue value) + { + _lock.EnterWriteLock(); + try + { + if (_cacheMap.TryGetValue(key, out var node)) + { + // Update the value and move the node to the front of the list + node.Value.Value = value; + _lruList.Remove(node); + _lruList.AddFirst(node); + } + else + { + if (_cacheMap.Count >= _capacity) + { + // Remove the least recently used item + var lruNode = _lruList.Last; + _cacheMap.Remove(lruNode.Value.Key); + _lruList.RemoveLast(); + } + + // Add the new item to the cache + var cacheItem = new CacheItem(key, value); + var newNode = new LinkedListNode(cacheItem); + _lruList.AddFirst(newNode); + _cacheMap[key] = newNode; + } + } + finally + { + _lock.ExitWriteLock(); + } + } + public bool ContainsKey(TKey key) + { + _lock.EnterReadLock(); + try + { + return _cacheMap.ContainsKey(key); + } + finally + { + _lock.ExitReadLock(); + } + } + + private class CacheItem + { + public TKey Key { get; } + public TValue Value { get; set; } + + public CacheItem(TKey key, TValue value) + { + Key = key; + Value = value; + } + } + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/LRUHashSet.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/LRUHashSet.cs new file mode 100644 index 0000000000..33ee77a056 --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/LRUHashSet.cs @@ -0,0 +1,114 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Generic; +using System.Threading; + +namespace NewRelic.Agent.Extensions.Caching +{ + /// + /// A thread-safe LRU HashSet implementation. 
+ /// + /// + public class LRUHashSet + { + private readonly int _capacity; + private readonly HashSet _hashSet; + private readonly LinkedList _lruList; + private readonly ReaderWriterLockSlim _lock = new(); + + public LRUHashSet(int capacity) + { + if (capacity <= 0) + { + throw new ArgumentException("Capacity must be greater than zero.", nameof(capacity)); + } + + _capacity = capacity; + _hashSet = new HashSet(); + _lruList = new LinkedList(); + } + + public bool Add(T item) + { + _lock.EnterWriteLock(); + try + { + if (_hashSet.Contains(item)) + { + // Move the accessed item to the front of the list + _lruList.Remove(item); + _lruList.AddFirst(item); + return false; + } + else + { + if (_hashSet.Count >= _capacity) + { + // Remove the least recently used item + var lruItem = _lruList.Last.Value; + _hashSet.Remove(lruItem); + _lruList.RemoveLast(); + } + + // Add the new item to the set and list + _hashSet.Add(item); + _lruList.AddFirst(item); + return true; + } + } + finally + { + _lock.ExitWriteLock(); + } + } + + public bool Contains(T item) + { + _lock.EnterReadLock(); + try + { + return _hashSet.Contains(item); + } + finally + { + _lock.ExitReadLock(); + } + } + + public bool Remove(T item) + { + _lock.EnterWriteLock(); + try + { + if (_hashSet.Remove(item)) + { + _lruList.Remove(item); + return true; + } + return false; + } + finally + { + _lock.ExitWriteLock(); + } + } + + public int Count + { + get + { + _lock.EnterReadLock(); + try + { + return _hashSet.Count; + } + finally + { + _lock.ExitReadLock(); + } + } + } + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/WeakReferenceKey.cs b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/WeakReferenceKey.cs new file mode 100644 index 0000000000..478498fe5e --- /dev/null +++ b/src/Agent/NewRelic/Agent/Extensions/NewRelic.Agent.Extensions/Caching/WeakReferenceKey.cs @@ -0,0 +1,50 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; + +namespace NewRelic.Agent.Extensions.Caching +{ + /// + /// Creates an object that can be used as a dictionary key, which holds a WeakReference<T> + /// + /// + public class WeakReferenceKey where T : class + { + private WeakReference WeakReference { get; } + + public WeakReferenceKey(T cacheKey) + { + WeakReference = new WeakReference(cacheKey); + } + + public override bool Equals(object obj) + { + if (obj is WeakReferenceKey otherKey) + { + if (WeakReference.TryGetTarget(out var thisTarget) && + otherKey.WeakReference.TryGetTarget(out var otherTarget)) + { + return ReferenceEquals(thisTarget, otherTarget); + } + } + + return false; + } + + public override int GetHashCode() + { + if (WeakReference.TryGetTarget(out var target)) + { + return target.GetHashCode(); + } + + return 0; + } + + /// + /// Gets the value from the WeakReference or null if the target has been garbage collected. + /// + public T Value => WeakReference.TryGetTarget(out var target) ? 
target : null; + } +} diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AmazonServiceClientWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AmazonServiceClientWrapper.cs index 615a9ea8e5..d61c7d642f 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AmazonServiceClientWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AmazonServiceClientWrapper.cs @@ -4,49 +4,62 @@ using System; using NewRelic.Agent.Api; using NewRelic.Agent.Extensions.AwsSdk; +using NewRelic.Agent.Extensions.Caching; using NewRelic.Agent.Extensions.Providers.Wrapper; -namespace NewRelic.Providers.Wrapper.AwsSdk +namespace NewRelic.Providers.Wrapper.AwsSdk; + +public class AmazonServiceClientWrapper : IWrapper { - public class AmazonServiceClientWrapper : IWrapper + private const int LRUCapacity = 100; + // cache the account id per instance of AmazonServiceClient.Config + public static LRUCache, string> AwsAccountIdByClientConfigCache = new(LRUCapacity); + + // cache instances of AmazonServiceClient + private static readonly LRUHashSet> AmazonServiceClientInstanceCache = new(LRUCapacity); + + public bool IsTransactionRequired => false; + + public CanWrapResponse CanWrap(InstrumentedMethodInfo instrumentedMethodInfo) { - /// - /// The AWS account id. - /// Parsed from the access key in the credentials of the client - or fall back to the configuration value if parsing fails. - /// Assumes only a single account id is used in the application. - /// - public static string AwsAccountId { get; private set; } + return new CanWrapResponse(instrumentedMethodInfo.RequestedWrapperName == nameof(AmazonServiceClientWrapper)); + } + + public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction) + { + object client = instrumentedMethodCall.MethodCall.InvocationTarget; - public bool IsTransactionRequired => false; + var weakReferenceKey = new WeakReferenceKey(client); + if (AmazonServiceClientInstanceCache.Contains(weakReferenceKey)) // don't do anything if we've already seen this client instance + return Delegates.NoOp; - public CanWrapResponse CanWrap(InstrumentedMethodInfo instrumentedMethodInfo) + AmazonServiceClientInstanceCache.Add(weakReferenceKey); + + string awsAccountId; + try { - return new CanWrapResponse(instrumentedMethodInfo.RequestedWrapperName == nameof(AmazonServiceClientWrapper)); + // get the AWSCredentials parameter + dynamic awsCredentials = instrumentedMethodCall.MethodCall.MethodArguments[0]; + + dynamic immutableCredentials = awsCredentials.GetCredentials(); + string accessKey = immutableCredentials.AccessKey; + + // convert the access key to an account id + awsAccountId = AwsAccountIdDecoder.GetAccountId(accessKey); + } + catch (Exception e) + { + agent.Logger.Info($"Unable to parse AWS Account ID from AccessKey. Using AccountId from configuration instead. 
Exception: {e.Message}"); + awsAccountId = agent.Configuration.AwsAccountId; } - public AfterWrappedMethodDelegate BeforeWrappedMethod(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction) + return Delegates.GetDelegateFor(onComplete: () => { - if (AwsAccountId != null) - return Delegates.NoOp; - - try - { - // get the AWSCredentials parameter - dynamic awsCredentials = instrumentedMethodCall.MethodCall.MethodArguments[0]; - - dynamic immutableCredentials = awsCredentials.GetCredentials(); - string accessKey = immutableCredentials.AccessKey; - - // convert the access key to an account id - AwsAccountId = AwsAccountIdDecoder.GetAccountId(accessKey); - } - catch (Exception e) - { - agent.Logger.Info($"Unable to parse AWS Account ID from AccessKey. Using AccountId from configuration instead. Exception: {e.Message}"); - AwsAccountId = agent.Configuration.AwsAccountId; - } + // get the _config field from the client + object clientConfig = ((dynamic)client).Config; - return Delegates.NoOp; - } + // cache the account id using clientConfig as the key + AwsAccountIdByClientConfigCache.Put(new WeakReferenceKey(clientConfig), awsAccountId); + }); } } diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs index d8e7f02642..03e49fa48f 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/AwsSdkPipelineWrapper.cs @@ -5,6 +5,7 @@ using System.Linq; using NewRelic.Agent.Api; using NewRelic.Agent.Extensions.AwsSdk; +using NewRelic.Agent.Extensions.Caching; using NewRelic.Agent.Extensions.Collections; using NewRelic.Agent.Extensions.Providers.Wrapper; using NewRelic.Providers.Wrapper.AwsSdk.RequestHandlers; @@ -32,8 +33,8 @@ private ArnBuilder CreateArnBuilder(IAgent agent, dynamic requestContext) string accountId = null; try { - accountId = GetAccountId(agent); var clientConfig = requestContext.ClientConfig; + accountId = GetAccountId(agent, clientConfig); if (clientConfig.RegionEndpoint != null) { var regionEndpoint = clientConfig.RegionEndpoint; @@ -53,9 +54,10 @@ private ArnBuilder CreateArnBuilder(IAgent agent, dynamic requestContext) return new ArnBuilder(partition, systemName, accountId); } - private string GetAccountId(IAgent agent) + private string GetAccountId(IAgent agent, object clientConfig) { - string accountId = AmazonServiceClientWrapper.AwsAccountId; + var cacheKey = new WeakReferenceKey(clientConfig); + string accountId = AmazonServiceClientWrapper.AwsAccountIdByClientConfigCache.ContainsKey(cacheKey) ? 
AmazonServiceClientWrapper.AwsAccountIdByClientConfigCache.Get(cacheKey) : agent.Configuration.AwsAccountId; if (accountId != null) { diff --git a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/RequestHandlers/DynamoDbRequestHandler.cs b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/RequestHandlers/DynamoDbRequestHandler.cs index 4e3b9922aa..bdf0371a96 100644 --- a/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/RequestHandlers/DynamoDbRequestHandler.cs +++ b/src/Agent/NewRelic/Agent/Extensions/Providers/Wrapper/AwsSdk/RequestHandlers/DynamoDbRequestHandler.cs @@ -12,35 +12,60 @@ namespace NewRelic.Providers.Wrapper.AwsSdk.RequestHandlers { internal static class DynamoDbRequestHandler { + private static readonly ConcurrentDictionary _operationNameCache = new(); - private static ConcurrentDictionary _operationNameCache = new ConcurrentDictionary(); - - public static AfterWrappedMethodDelegate HandleDynamoDbRequest(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction, dynamic request, bool isAsync, dynamic executionContext) + public static AfterWrappedMethodDelegate HandleDynamoDbRequest(InstrumentedMethodCall instrumentedMethodCall, IAgent agent, ITransaction transaction, dynamic request, bool isAsync, ArnBuilder builder) { - var requestType = request.GetType().Name as string; - - string model; - string operation; + var requestType = ((object)request).GetType().Name; - // PutItemRequest => put_item, - // CreateTableRequest => create_table, etc. - operation = _operationNameCache.GetOrAdd(requestType, GetOperationNameFromRequestType); + var operation = _operationNameCache.GetOrAdd(requestType, requestType.Replace("Request", string.Empty).ToSnakeCase()); - // Even though there is no common interface they all implement, every Request type I checked - // has a TableName property - model = request.TableName; + // all request objects implement a TableName property + string model = request.TableName; var segment = transaction.StartDatastoreSegment(instrumentedMethodCall.MethodCall, new ParsedSqlStatement(DatastoreVendor.DynamoDB, model, operation), isLeaf: true); + var arn = builder.Build("dynamodb", $"table/{model}"); + if (!string.IsNullOrEmpty(arn)) + segment.AddCloudSdkAttribute("cloud.resource_id", arn); + segment.AddCloudSdkAttribute("aws.operation", operation); + segment.AddCloudSdkAttribute("aws.region", builder.Region); + return isAsync ? 
- Delegates.GetAsyncDelegateFor(agent, segment) + Delegates.GetAsyncDelegateFor(agent, segment, true, responseTask => + { + try + { + if (responseTask.IsFaulted) + transaction.NoticeError(responseTask.Exception); + else + SetRequestIdIfAvailable(agent, segment, ((dynamic)responseTask).Result); + } + finally + { + segment.End(); + } + + }, TaskContinuationOptions.ExecuteSynchronously) : - Delegates.GetDelegateFor(segment); + Delegates.GetDelegateFor( + onFailure: segment.End, + onSuccess: response => + { + SetRequestIdIfAvailable(agent, segment, response); + segment.End(); + } + ); + } - private static string GetOperationNameFromRequestType(string requestType) + private static void SetRequestIdIfAvailable(IAgent agent, ISegment segment, dynamic response) { - return requestType.Replace("Request", string.Empty).ToSnakeCase(); + if (response != null && response.ResponseMetadata != null && response.ResponseMetadata.RequestId != null) + { + string requestId = response.ResponseMetadata.RequestId; + segment.AddCloudSdkAttribute("aws.requestId", requestId); + } } } } diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs index e07fe0aa84..a0ddde2000 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkDynamoDBExerciser.cs @@ -8,7 +8,6 @@ using System; using System.Collections.Generic; using Amazon.Runtime; -using System.Threading; namespace AwsSdkTestApp.AwsSdkExercisers { @@ -24,12 +23,18 @@ public AwsSdkDynamoDBExerciser() private AmazonDynamoDBClient GetDynamoDBClient() { + AmazonDynamoDBConfig clientConfig = new AmazonDynamoDBConfig + { + // Set the endpoint URL + ServiceURL = "http://dynamodb:8000", // port must match what is set in docker compose + AuthenticationRegion = "us-east-2" + //RegionEndpoint = RegionEndpoint.USEast2 **DO NOT* specify RegionEndpoint for local tests + }; + + // use plausible (but fake) access key and fake secret key so account id parsing can be tested + var creds = new BasicAWSCredentials("FOOIHSFODNNAEXAMPLE", + "MOREGIBBERISH"); // account id will be "520056171328" - AmazonDynamoDBConfig clientConfig = new AmazonDynamoDBConfig(); - // Set the endpoint URL - clientConfig.ServiceURL = "http://dynamodb:8000"; // port must match what is set in docker compose - clientConfig.AuthenticationRegion = "us-west-2"; - var creds = new BasicAWSCredentials("xxx", "xxx"); AmazonDynamoDBClient client = new AmazonDynamoDBClient(creds, clientConfig); return client; diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs index ffef934382..922a1ab8b1 100644 --- a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/AwsSdkExercisers/AwsSdkSQSExerciser.cs @@ -8,6 +8,7 @@ using Amazon.SQS.Model; using System.Linq; using System.Collections.Generic; +using Amazon.Runtime; namespace AwsSdkTestApp.AwsSdkExercisers { @@ -25,14 +26,15 @@ public AwsSdkSQSExerciser() private AmazonSQSClient GetSqsClient() { // configure the client to use LocalStack - var awsCredentials = new 
Amazon.Runtime.BasicAWSCredentials("dummy", "dummy"); + // use plausible (but fake) access key and fake secret key so account id parsing can be tested + var creds = new BasicAWSCredentials("FOOIHSHSDNNAEXAMPLE", "MOREGIBBERISH"); var config = new AmazonSQSConfig { ServiceURL = "http://localstack:4566", AuthenticationRegion = "us-west-2" }; - var sqsClient = new AmazonSQSClient(awsCredentials, config); + var sqsClient = new AmazonSQSClient(creds, config); return sqsClient; } diff --git a/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkMultiServiceController.cs b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkMultiServiceController.cs new file mode 100644 index 0000000000..c1f63f858e --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerApplications/AwsSdkTestApp/Controllers/AwsSdkMultiServiceController.cs @@ -0,0 +1,59 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using System.Threading.Tasks; +using AwsSdkTestApp.AwsSdkExercisers; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; + +namespace AwsSdkTestApp.Controllers +{ + [ApiController] + [Route("[controller]")] + public class AwsSdkMultiServiceController : ControllerBase + { + private readonly ILogger _logger; + + public AwsSdkMultiServiceController(ILogger logger) + { + _logger = logger; + _logger.LogInformation("Created AwsSdkMultiServiceController"); + } + + [HttpGet("CallMultipleServicesAsync")] + public async Task CallMultipleServicesAsync([FromQuery, Required]string queueName, [FromQuery, Required]string tableName, [FromQuery, Required]string bookName) + { + _logger.LogInformation("Starting CallMultipleServicesAsync"); + + using var sqsExerciser = new AwsSdkSQSExerciser(); + using var dynamoDbExerciser = new AwsSdkDynamoDBExerciser(); + + await sqsExerciser.SQS_InitializeAsync(queueName); + + // send an SQS message + await sqsExerciser.SQS_SendMessageAsync(bookName); + + await Task.Delay(TimeSpan.FromSeconds(2)); // may not really be necessary + + // receive an SQS message + var messages = await sqsExerciser.SQS_ReceiveMessageAsync(); + + var movieName = messages.First().Body; + + // create a DynamoDB table + await dynamoDbExerciser.CreateTableAsync(tableName); + // put an item in a DynamoDB table + await dynamoDbExerciser.PutItemAsync(tableName, movieName, "2021"); + + // delete the table + await dynamoDbExerciser.DeleteTableAsync(tableName); + + await sqsExerciser.SQS_TeardownAsync(); + + _logger.LogInformation("Finished CallMultipleServicesAsync"); + } + } +} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerDynamoDBTestFixture.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerDynamoDBTestFixture.cs new file mode 100644 index 0000000000..313858f5d6 --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerDynamoDBTestFixture.cs @@ -0,0 +1,56 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using NewRelic.Agent.ContainerIntegrationTests.Applications; + +namespace NewRelic.Agent.ContainerIntegrationTests.Fixtures; + +public class AwsSdkContainerDynamoDBTestFixture : AwsSdkContainerTestFixtureBase +{ + private const string Dockerfile = "AwsSdkTestApp/Dockerfile"; + private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; + private const string DistroTag = "jammy"; + + private readonly string BaseUrl; + + public AwsSdkContainerDynamoDBTestFixture() : base(DistroTag, Architecture, Dockerfile) + { + BaseUrl = $"http://localhost:{Port}/awssdkdynamodb"; + } + + public void CreateTableAsync(string tableName) + { + GetAndAssertStatusCode($"{BaseUrl}/CreateTableAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); + } + public void DeleteTableAsync(string tableName) + { + GetAndAssertStatusCode($"{BaseUrl}/DeleteTableAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); + } + + public void PutItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/PutItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void GetItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/GetItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void UpdateItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/UpdateItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + + public void DeleteItemAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/DeleteItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void QueryAsync(string tableName, string title, string year) + { + GetAndAssertStatusCode($"{BaseUrl}/QueryAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + } + public void ScanAsync(string tableName) + { + GetAndAssertStatusCode($"{BaseUrl}/ScanAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); + } + +} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerMultiServiceTestFixture.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerMultiServiceTestFixture.cs new file mode 100644 index 0000000000..d55ce7d03e --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerMultiServiceTestFixture.cs @@ -0,0 +1,25 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using NewRelic.Agent.ContainerIntegrationTests.Applications; + +namespace NewRelic.Agent.ContainerIntegrationTests.Fixtures; + +public class AwsSdkContainerMultiServiceTestFixture : AwsSdkContainerTestFixtureBase +{ + private const string Dockerfile = "AwsSdkTestApp/Dockerfile"; + private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; + private const string DistroTag = "jammy"; + + private readonly string BaseUrl; + + public AwsSdkContainerMultiServiceTestFixture() : base(DistroTag, Architecture, Dockerfile) + { + BaseUrl = $"http://localhost:{Port}/awssdkmultiservice"; + } + + public void ExerciseMultiService(string tableName, string queueName, string bookName) + { + GetAndAssertStatusCode($"{BaseUrl}/CallMultipleServicesAsync?tableName={tableName}&queueName={queueName}&bookName={bookName}", System.Net.HttpStatusCode.OK); + } +} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerSQSTestFixture.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerSQSTestFixture.cs new file mode 100644 index 0000000000..75738863be --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerSQSTestFixture.cs @@ -0,0 +1,53 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using NewRelic.Agent.ContainerIntegrationTests.Applications; + +namespace NewRelic.Agent.ContainerIntegrationTests.Fixtures +{ + public class AwsSdkContainerSQSTestFixture : AwsSdkContainerTestFixtureBase + { + private const string Dockerfile = "AwsSdkTestApp/Dockerfile"; + private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; + private const string DistroTag = "jammy"; + + private readonly string BaseUrl; + + public AwsSdkContainerSQSTestFixture() : base(DistroTag, Architecture, Dockerfile) + { + BaseUrl = $"http://localhost:{Port}/awssdksqs"; + } + + public void ExerciseSQS_SendReceivePurge(string queueName) + { + // The exerciser will return a 500 error if the `RequestMessage.MessageAttributeNames` collection is modified by our instrumentation. 
+ // See https://github.com/newrelic/newrelic-dotnet-agent/pull/2646 + GetAndAssertStatusCode($"{BaseUrl}/SQS_SendReceivePurge?queueName={queueName}", System.Net.HttpStatusCode.OK); + } + + public string ExerciseSQS_SendAndReceiveInSeparateTransactions(string queueName) + { + var queueUrl = GetString($"{BaseUrl}/SQS_InitializeQueue?queueName={queueName}"); + + GetAndAssertStatusCode($"{BaseUrl}/SQS_SendMessageToQueue?message=Hello&messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); + + var messagesJson = GetString($"{BaseUrl}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); + + GetAndAssertStatusCode($"{BaseUrl}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); + + return messagesJson; + } + + public string ExerciseSQS_ReceiveEmptyMessage(string queueName) + { + var queueUrl = GetString($"{BaseUrl}/SQS_InitializeQueue?queueName={queueName}"); + + var messagesJson = GetString($"{BaseUrl}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); + + GetAndAssertStatusCode($"{BaseUrl}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); + + return messagesJson; + } + + } +} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs index b70159f279..a40c277e3e 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Fixtures/AwsSdkContainerTestFixtures.cs @@ -4,122 +4,24 @@ using System; using System.Threading.Tasks; using NewRelic.Agent.ContainerIntegrationTests.Applications; -using NewRelic.Agent.ContainerIntegrationTests.Fixtures; using NewRelic.Agent.IntegrationTestHelpers.RemoteServiceFixtures; -namespace NewRelic.Agent.ContainerIntegrationTests.Fixtures -{ - public abstract class AwsSdkContainerTestFixtureBase( - string distroTag, - ContainerApplication.Architecture containerArchitecture, - string dockerfile, - string dockerComposeFile = "docker-compose-awssdk.yml") - : RemoteApplicationFixture(new ContainerApplication(distroTag, containerArchitecture, DotnetVersion, dockerfile, - dockerComposeFile, "awssdktestapp")) - { - private const string DotnetVersion = "8.0"; - - protected override int MaxTries => 1; - - public void Delay(int seconds) - { - Task.Delay(TimeSpan.FromSeconds(seconds)).GetAwaiter().GetResult(); - } - } -} - -public class AwsSdkContainerSQSTestFixture : AwsSdkContainerTestFixtureBase -{ - private const string Dockerfile = "AwsSdkTestApp/Dockerfile"; - private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; - private const string DistroTag = "jammy"; - - private readonly string BaseUrl; - - public AwsSdkContainerSQSTestFixture() : base(DistroTag, Architecture, Dockerfile) - { - BaseUrl = $"http://localhost:{Port}/awssdksqs"; - } - - public void ExerciseSQS_SendReceivePurge(string queueName) - { - // The exerciser will return a 500 error if the `RequestMessage.MessageAttributeNames` collection is modified by our instrumentation. 
- // See https://github.com/newrelic/newrelic-dotnet-agent/pull/2646 - GetAndAssertStatusCode($"{BaseUrl}/SQS_SendReceivePurge?queueName={queueName}", System.Net.HttpStatusCode.OK); - } - - public string ExerciseSQS_SendAndReceiveInSeparateTransactions(string queueName) - { - var queueUrl = GetString($"{BaseUrl}/SQS_InitializeQueue?queueName={queueName}"); - - GetAndAssertStatusCode($"{BaseUrl}/SQS_SendMessageToQueue?message=Hello&messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); - - var messagesJson = GetString($"{BaseUrl}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); - - GetAndAssertStatusCode($"{BaseUrl}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); - - return messagesJson; - } - - public string ExerciseSQS_ReceiveEmptyMessage(string queueName) - { - var queueUrl = GetString($"{BaseUrl}/SQS_InitializeQueue?queueName={queueName}"); - - var messagesJson = GetString($"{BaseUrl}/SQS_ReceiveMessageFromQueue?messageQueueUrl={queueUrl}"); +namespace NewRelic.Agent.ContainerIntegrationTests.Fixtures; - GetAndAssertStatusCode($"{BaseUrl}/SQS_DeleteQueue?messageQueueUrl={queueUrl}", System.Net.HttpStatusCode.OK); - - return messagesJson; - } - -} - -public class AwsSdkContainerDynamoDBTestFixture : AwsSdkContainerTestFixtureBase +public abstract class AwsSdkContainerTestFixtureBase( + string distroTag, + ContainerApplication.Architecture containerArchitecture, + string dockerfile, + string dockerComposeFile = "docker-compose-awssdk.yml") + : RemoteApplicationFixture(new ContainerApplication(distroTag, containerArchitecture, DotnetVersion, dockerfile, + dockerComposeFile, "awssdktestapp")) { - private const string Dockerfile = "AwsSdkTestApp/Dockerfile"; - private const ContainerApplication.Architecture Architecture = ContainerApplication.Architecture.X64; - private const string DistroTag = "jammy"; - - private readonly string BaseUrl; + private const string DotnetVersion = "8.0"; - public AwsSdkContainerDynamoDBTestFixture() : base(DistroTag, Architecture, Dockerfile) - { - BaseUrl = $"http://localhost:{Port}/awssdkdynamodb"; - } - - public void CreateTableAsync(string tableName) - { - GetAndAssertStatusCode($"{BaseUrl}/CreateTableAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); - } - public void DeleteTableAsync(string tableName) - { - GetAndAssertStatusCode($"{BaseUrl}/DeleteTableAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); - } + protected override int MaxTries => 1; - public void PutItemAsync(string tableName, string title, string year) + public void Delay(int seconds) { - GetAndAssertStatusCode($"{BaseUrl}/PutItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); + Task.Delay(TimeSpan.FromSeconds(seconds)).GetAwaiter().GetResult(); } - public void GetItemAsync(string tableName, string title, string year) - { - GetAndAssertStatusCode($"{BaseUrl}/GetItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); - } - public void UpdateItemAsync(string tableName, string title, string year) - { - GetAndAssertStatusCode($"{BaseUrl}/UpdateItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); - } - - public void DeleteItemAsync(string tableName, string title, string year) - { - GetAndAssertStatusCode($"{BaseUrl}/DeleteItemAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); - } - public void QueryAsync(string tableName, string title, string year) - { - 
GetAndAssertStatusCode($"{BaseUrl}/QueryAsync?tableName={tableName}&title={title}&year={year}", System.Net.HttpStatusCode.OK); - } - public void ScanAsync(string tableName) - { - GetAndAssertStatusCode($"{BaseUrl}/ScanAsync?tableName={tableName}", System.Net.HttpStatusCode.OK); - } - } diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs index c136e83bf7..a574820cc7 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkDynamoDBTest.cs @@ -4,13 +4,14 @@ using System; using System.Collections.Generic; using System.Linq; +using NewRelic.Agent.ContainerIntegrationTests.Fixtures; using NewRelic.Agent.IntegrationTestHelpers; using Xunit; using Xunit.Abstractions; namespace NewRelic.Agent.ContainerIntegrationTests.Tests.AwsSdk; -public abstract class AwsSdkDynamoDBTestBase : NewRelicIntegrationTest +public class AwsSdkDynamoDBTest : NewRelicIntegrationTest { private readonly AwsSdkContainerDynamoDBTestFixture _fixture; @@ -18,7 +19,9 @@ public abstract class AwsSdkDynamoDBTestBase : NewRelicIntegrationTest (string)se.IntrinsicAttributes["category"] == "datastore") + .ToList(); + + // select the set of AgentAttributes values with a key of "aws.operation" + var awsOperations = datastoreSpanEvents.Select(se => (string)se.AgentAttributes["aws.operation"]).ToList(); + + + Assert.Multiple( + () => Assert.Equal(0, _fixture.AgentLog.GetWrapperExceptionLineCount()), + () => Assert.Equal(0, _fixture.AgentLog.GetApplicationErrorLineCount()), + + () => Assert.Equal(expectedOperationsCount, datastoreSpanEvents.Count), + () => Assert.Equal(expectedOperationsCount, awsOperations.Intersect(expectedOperations).Count()), + + () => Assert.All(datastoreSpanEvents, se => Assert.Contains(expectedAwsAgentAttributes, key => se.AgentAttributes.ContainsKey(key))), + () => Assert.All(datastoreSpanEvents, se => Assert.Equal(expectedArn, se.AgentAttributes["cloud.resource_id"])), + + () => Assertions.MetricsExist(expectedMetrics, metrics) + ); } } - diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkMultiServiceTest.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkMultiServiceTest.cs new file mode 100644 index 0000000000..e3e1867b6c --- /dev/null +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkMultiServiceTest.cs @@ -0,0 +1,74 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Linq; +using NewRelic.Agent.ContainerIntegrationTests.Fixtures; +using NewRelic.Agent.IntegrationTestHelpers; +using Xunit; +using Xunit.Abstractions; + +namespace NewRelic.Agent.ContainerIntegrationTests.Tests.AwsSdk; + +public class AwsSdkMultiServiceTest : NewRelicIntegrationTest +{ + private readonly AwsSdkContainerMultiServiceTestFixture _fixture; + + private readonly string _tableName = $"TableName_{Guid.NewGuid()}"; + private readonly string _queueName = $"QueueName_{Guid.NewGuid()}"; + private readonly string _bookName = $"BookName_{Guid.NewGuid()}"; + + private const string _expectedAccountId = "520056171328"; // matches the account ID parsed from the fake access key used in AwsSdkDynamoDBExerciser + private const string _unxpectedAccountId = "520198777664"; // matches the account ID parsed from the fake access key used in AwsSdkSQSExerciser + + + public AwsSdkMultiServiceTest(AwsSdkContainerMultiServiceTestFixture fixture, ITestOutputHelper output) : base(fixture) + { + _fixture = fixture; + _fixture.TestLogger = output; + + _fixture.Actions(setupConfiguration: () => + { + var configModifier = new NewRelicConfigModifier(_fixture.DestinationNewRelicConfigFilePath); + configModifier.SetLogLevel("finest"); + configModifier.ForceTransactionTraces(); + configModifier.EnableDistributedTrace(); + configModifier.ConfigureFasterMetricsHarvestCycle(15); + configModifier.ConfigureFasterSpanEventsHarvestCycle(15); + configModifier.ConfigureFasterTransactionTracesHarvestCycle(15); + configModifier.LogToConsole(); + + }, + exerciseApplication: () => + { + _fixture.Delay(5); + + _fixture.ExerciseMultiService(_tableName, _queueName, _bookName); + + _fixture.AgentLog.WaitForLogLine(AgentLogBase.MetricDataLogLineRegex, TimeSpan.FromMinutes(2)); + _fixture.AgentLog.WaitForLogLine(AgentLogBase.TransactionTransformCompletedLogLineRegex, + TimeSpan.FromMinutes(2)); + }); + + _fixture.Initialize(); + } + + [Fact] + public void Test() + { + // get all span events + var spanEvents = _fixture.AgentLog.GetSpanEvents(); + // select all span events having an Agent attribute with a key of "cloud.resource_id" + var cloudResourceIdSpanEvents = spanEvents.Where(spanEvent => spanEvent.AgentAttributes.ContainsKey("cloud.resource_id")).ToList(); + + string expectedArn = $"arn:aws:dynamodb:(unknown):{_expectedAccountId}:table/{_tableName}"; + string unExpectedArn = $"arn:aws:dynamodb:(unknown):{_unxpectedAccountId}:table/{_tableName}"; + + // verify all span events contain the expected arn, and do not contain the unexpected arn and all are of category datastore + Assert.Multiple( + () => Assert.All(cloudResourceIdSpanEvents, se => Assert.Equal(expectedArn, se.AgentAttributes["cloud.resource_id"])), + () => Assert.All(cloudResourceIdSpanEvents, se => Assert.NotEqual(_unxpectedAccountId, se.AgentAttributes["cloud.resource_id"])), + () => Assert.All(cloudResourceIdSpanEvents, se => Assert.Equal("datastore", se.IntrinsicAttributes["category"])) + ); + } +} diff --git a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs index edd06af3fb..72a3278567 100644 --- a/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs +++ b/tests/Agent/IntegrationTests/ContainerIntegrationTests/Tests/AwsSdk/AwsSdkSQSTest.cs @@ -4,6 +4,7 @@ using System; using System.Collections.Generic; using System.Linq; +using 
NewRelic.Agent.ContainerIntegrationTests.Fixtures; using NewRelic.Agent.IntegrationTestHelpers; using NewRelic.Testing.Assertions; using Xunit; @@ -176,4 +177,3 @@ public AwsSdkSQSTestNullCollections(AwsSdkContainerSQSTestFixture fixture, ITest { } } - diff --git a/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj b/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj index 7827c51495..0839be7b2f 100644 --- a/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj +++ b/tests/Agent/IntegrationTests/SharedApplications/Common/MFALatestPackages/MFALatestPackages.csproj @@ -5,12 +5,12 @@ - - - + + + diff --git a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/LRUCacheTests.cs b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/LRUCacheTests.cs new file mode 100644 index 0000000000..fd9a5bfee8 --- /dev/null +++ b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/LRUCacheTests.cs @@ -0,0 +1,170 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using NUnit.Framework; +using NewRelic.Agent.Extensions.Caching; + +namespace Agent.Extensions.Tests.Cache +{ + [TestFixture] + public class LRUCacheTests + { + [Test] + public void Constructor_ShouldThrowException_WhenCapacityIsZeroOrNegative() + { + Assert.Throws(() => new LRUCache(0)); + Assert.Throws(() => new LRUCache(-1)); + } + + [Test] + public void Put_ShouldAddItemToCache() + { + // Arrange + var cache = new LRUCache(2); + + // Act + cache.Put(1, "one"); + + // Assert + Assert.That(cache.ContainsKey(1), Is.True); + Assert.That(cache.Get(1), Is.EqualTo("one")); + } + + [Test] + public void Get_ShouldThrowException_WhenKeyNotFound() + { + // Arrange + var cache = new LRUCache(2); + + // Act & Assert + Assert.Throws(() => cache.Get(1)); + } + + [Test] + public void Put_ShouldEvictLeastRecentlyUsedItem_WhenCapacityIsExceeded() + { + // Arrange + var cache = new LRUCache(2); + cache.Put(1, "one"); + cache.Put(2, "two"); + + // Act + cache.Put(3, "three"); + + // Assert + Assert.That(cache.ContainsKey(1), Is.False); + Assert.That(cache.ContainsKey(2), Is.True); + Assert.That(cache.ContainsKey(3), Is.True); + } + + [Test] + public void Get_ShouldMoveAccessedItemToFront() + { + // Arrange + var cache = new LRUCache(2); + cache.Put(1, "one"); + cache.Put(2, "two"); + + // Act + var value = cache.Get(1); + cache.Put(3, "three"); + + // Assert + Assert.That(cache.ContainsKey(1), Is.True); + Assert.That(cache.ContainsKey(2), Is.False); + Assert.That(cache.ContainsKey(3), Is.True); + } + + [Test] + public void Put_ShouldUpdateValue_WhenKeyAlreadyExists() + { + // Arrange + var cache = new LRUCache(2); + cache.Put(1, "one"); + + // Act + cache.Put(1, "uno"); + + // Assert + Assert.That(cache.Get(1), Is.EqualTo("uno")); + } + + [Test] + public void ContainsKey_ShouldReturnTrueForExistingKey() + { + // Arrange + var cache = new LRUCache(2); + cache.Put(1, "one"); + + // Act + var containsKey = cache.ContainsKey(1); + + // Assert + Assert.That(containsKey, Is.True); + } + + [Test] + public void ContainsKey_ShouldReturnFalseForNonExistingKey() + { + // Arrange + var cache = new LRUCache(2); + + // Act + var containsKey = cache.ContainsKey(1); + + // Assert + Assert.That(containsKey, Is.False); + } + + [Test] + public void Put_ShouldHandleEdgeCaseForCapacity() + { + // 
Arrange + var cache = new LRUCache(1); + cache.Put(1, "one"); + + // Act + cache.Put(2, "two"); + + // Assert + Assert.That(cache.ContainsKey(1), Is.False); + Assert.That(cache.ContainsKey(2), Is.True); + } + + [Test] + public void Cache_ShouldBeThreadSafe() + { + // Arrange + var cache = new LRUCache(100); + var putTasks = new List(); + var getTasks = new List(); + + // Act + for (int i = 0; i < 100; i++) + { + int index = i; + putTasks.Add(Task.Run(() => cache.Put(index, $"value{index}"))); + } + + Task.WaitAll(putTasks.ToArray()); + + for (int i = 0; i < 100; i++) + { + int index = i; + getTasks.Add(Task.Run(() => cache.Get(index))); + } + + Task.WaitAll(getTasks.ToArray()); + + // Assert + for (int i = 0; i < 100; i++) + { + Assert.That(cache.ContainsKey(i), Is.True); + Assert.That(cache.Get(i), Is.EqualTo($"value{i}")); + } + } + } +} diff --git a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/LRUHashSetTests.cs b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/LRUHashSetTests.cs new file mode 100644 index 0000000000..dbd100cea0 --- /dev/null +++ b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/LRUHashSetTests.cs @@ -0,0 +1,230 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using NUnit.Framework; +using NewRelic.Agent.Extensions.Caching; + +namespace Agent.Extensions.Tests.Cache +{ + [TestFixture] + public class LRUHashSetTests + { + [Test] + public void Constructor_ShouldThrowException_WhenCapacityIsZeroOrNegative() + { + Assert.Throws(() => new LRUHashSet(0)); + Assert.Throws(() => new LRUHashSet(-1)); + } + + [Test] + public void Add_ShouldAddItemToSet() + { + var set = new LRUHashSet(2); + var added = set.Add(1); + Assert.That(added, Is.True); + Assert.That(set.Contains(1), Is.True); + } + + [Test] + public void Add_ShouldNotAddDuplicateItem() + { + var set = new LRUHashSet(2); + set.Add(1); + var added = set.Add(1); + Assert.That(added, Is.False); + Assert.That(set.Contains(1), Is.True); + } + + [Test] + public void Add_ShouldEvictLeastRecentlyUsedItem_WhenCapacityIsExceeded() + { + var set = new LRUHashSet(2); + set.Add(1); + set.Add(2); + set.Add(3); + Assert.That(set.Contains(1), Is.False); + Assert.That(set.Contains(2), Is.True); + Assert.That(set.Contains(3), Is.True); + } + + [Test] + public void Contains_ShouldReturnTrueForExistingItem() + { + var set = new LRUHashSet(2); + set.Add(1); + var contains = set.Contains(1); + Assert.That(contains, Is.True); + } + + [Test] + public void Contains_ShouldReturnFalseForNonExistingItem() + { + var set = new LRUHashSet(2); + var contains = set.Contains(1); + Assert.That(contains, Is.False); + } + + [Test] + public void Remove_ShouldRemoveItemFromSet() + { + var set = new LRUHashSet(2); + set.Add(1); + var removed = set.Remove(1); + Assert.That(removed, Is.True); + Assert.That(set.Contains(1), Is.False); + } + + [Test] + public void Remove_ShouldReturnFalseForNonExistingItem() + { + var set = new LRUHashSet(2); + var removed = set.Remove(1); + Assert.That(removed, Is.False); + } + + [Test] + public void Count_ShouldReturnNumberOfItemsInSet() + { + var set = new LRUHashSet(2); + set.Add(1); + set.Add(2); + var count = set.Count; + Assert.That(count, Is.EqualTo(2)); + } + + [Test] + public void Set_ShouldBeThreadSafe() + { + var set = new LRUHashSet(100); + var addTasks = new List(); + var containsTasks = new List(); + + for (int i = 0; i < 100; i++) + { + int index 
= i; + addTasks.Add(Task.Run(() => set.Add(index))); + } + + Task.WaitAll(addTasks.ToArray()); + + for (int i = 0; i < 100; i++) + { + int index = i; + containsTasks.Add(Task.Run(() => set.Contains(index))); + } + + Task.WaitAll(containsTasks.ToArray()); + + for (int i = 0; i < 100; i++) + { + Assert.That(set.Contains(i), Is.True); + } + } + + [Test] + public void Add_ShouldHandleNullValues() + { + var set = new LRUHashSet(2); + var added = set.Add(null); + Assert.That(added, Is.True); + Assert.That(set.Contains(null), Is.True); + } + + [Test] + public void Add_ShouldHandleCapacityOfOne() + { + var set = new LRUHashSet(1); + set.Add(1); + set.Add(2); + Assert.That(set.Contains(1), Is.False); + Assert.That(set.Contains(2), Is.True); + } + + [Test] + public void Add_ShouldMoveAccessedItemToFront() + { + var set = new LRUHashSet(2); + set.Add(1); + set.Add(2); + set.Add(1); + set.Add(3); + Assert.That(set.Contains(1), Is.True); + Assert.That(set.Contains(2), Is.False); + Assert.That(set.Contains(3), Is.True); + } + + [Test] + public void ConcurrentAddAndRemove_ShouldBeThreadSafe() + { + var set = new LRUHashSet(100); + var addTasks = new List(); + var removeTasks = new List(); + + // Add items concurrently + for (int i = 0; i < 100; i++) + { + int index = i; + addTasks.Add(Task.Run(() => set.Add(index))); + } + + Task.WaitAll(addTasks.ToArray()); + + // Remove items concurrently + for (int i = 0; i < 100; i++) + { + int index = i; + removeTasks.Add(Task.Run(() => set.Remove(index))); + } + + Task.WaitAll(removeTasks.ToArray()); + + // Verify that all items have been removed + for (int i = 0; i < 100; i++) + { + Assert.That(set.Contains(i), Is.False); + } + } + + [Test] + public void Add_ShouldHandleLargeNumberOfItems() + { + var set = new LRUHashSet(1000); + for (int i = 0; i < 1000; i++) + { + set.Add(i); + } + for (int i = 0; i < 1000; i++) + { + Assert.That(set.Contains(i), Is.True); + } + } + + [Test] + public void Add_ShouldHandleDuplicateAdditions() + { + var set = new LRUHashSet(2); + set.Add(1); + set.Add(1); + Assert.That(set.Count, Is.EqualTo(1)); + } + + [Test] + public void Remove_ShouldHandleNonExistentItem() + { + var set = new LRUHashSet(2); + var removed = set.Remove(1); + Assert.That(removed, Is.False); + } + + [Test] + public void Add_ShouldHandleMaxCapacity() + { + var set = new LRUHashSet(int.MaxValue); + set.Add(1); + Assert.That(set.Contains(1), Is.True); + } + } +} diff --git a/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/WeakReferenceKeyTests.cs b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/WeakReferenceKeyTests.cs new file mode 100644 index 0000000000..375780f36b --- /dev/null +++ b/tests/Agent/UnitTests/NewRelic.Agent.Extensions.Tests/Cache/WeakReferenceKeyTests.cs @@ -0,0 +1,177 @@ +// Copyright 2020 New Relic, Inc. All rights reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +using System; +using System.Threading.Tasks; +using NewRelic.Agent.Extensions.Caching; +using NUnit.Framework; + +namespace Agent.Extensions.Tests.Cache +{ + [TestFixture] + public class WeakReferenceKeyTests + { + [Test] + public void Constructor_ShouldInitializeWeakReference() + { + // Arrange + var foo = new Foo(); + + // Act + var weakReferenceKey = new WeakReferenceKey(foo); + + // Assert + Assert.Multiple(() => + { + Assert.That(weakReferenceKey, Is.Not.Null); + Assert.That(weakReferenceKey.Value, Is.Not.Null); + Assert.That(weakReferenceKey.Value, Is.SameAs(foo)); + }); + } + + [Test] + public void Equals_ShouldReturnTrueForSameObject() + { + // Arrange + var foo = new Foo(); + var weakReferenceKey1 = new WeakReferenceKey(foo); + var weakReferenceKey2 = new WeakReferenceKey(foo); + + // Act + var result = weakReferenceKey1.Equals(weakReferenceKey2); + + // Assert + Assert.That(result, Is.True); + } + + [Test] + public void Equals_ShouldReturnFalseForDifferentObjects() + { + // Arrange + var foo1 = new Foo(); + var foo2 = new Foo(); + var weakReferenceKey1 = new WeakReferenceKey(foo1); + var weakReferenceKey2 = new WeakReferenceKey(foo2); + + // Act + var result = weakReferenceKey1.Equals(weakReferenceKey2); + + // Assert + Assert.That(result, Is.False); + } + + [Test] + public void GetHashCode_ShouldReturnSameHashCodeForSameObject() + { + // Arrange + var foo = new Foo(); + var weakReferenceKey1 = new WeakReferenceKey(foo); + var weakReferenceKey2 = new WeakReferenceKey(foo); + + // Act + var hashCode1 = weakReferenceKey1.GetHashCode(); + var hashCode2 = weakReferenceKey2.GetHashCode(); + + // Assert + Assert.That(hashCode1, Is.EqualTo(hashCode2)); + } + + [Test] + public void GetHashCode_ShouldReturnDifferentHashCodeForDifferentObjects() + { + // Arrange + var foo1 = new Foo(); + var foo2 = new Foo(); + var weakReferenceKey1 = new WeakReferenceKey(foo1); + var weakReferenceKey2 = new WeakReferenceKey(foo2); + + // Act + var hashCode1 = weakReferenceKey1.GetHashCode(); + var hashCode2 = weakReferenceKey2.GetHashCode(); + + // Assert + Assert.That(hashCode1, Is.Not.EqualTo(hashCode2)); + } + + [Test] + public async Task GetHashCode_ShouldReturnZeroIfTargetIsGarbageCollected() + { + // Arrange + var weakRefKey = GetWeakReferenceKey(); + + // Act + Assert.That(weakRefKey.Value, Is.Not.Null); + // force garbage collection + GC.Collect(); + GC.WaitForPendingFinalizers(); + await Task.Delay(500); + GC.Collect(); // Force another collection + + // Assert + Assert.That(weakRefKey.GetHashCode(), Is.EqualTo(0)); + } + + [Test] + public async Task Value_ShouldReturnNullIfTargetIsGarbageCollected() + { + // Arrange + var weakRefKey = GetWeakReferenceKey(); + + // Act + Assert.That(weakRefKey.Value, Is.Not.Null); + // force garbage collection + GC.Collect(); + GC.WaitForPendingFinalizers(); + await Task.Delay(500); + GC.Collect(); // Force another collection + + // Assert + Assert.That(weakRefKey.Value, Is.Null); + } + + private WeakReferenceKey GetWeakReferenceKey() + { + var foo = new Foo(); + return new WeakReferenceKey(foo); + } + [Test] + public void Equals_ShouldReturnFalseForNonWeakReferenceKeyObject() + { + // Arrange + var foo = new Foo(); + var weakReferenceKey = new WeakReferenceKey(foo); + + // Act + var result = weakReferenceKey.Equals(new object()); + + // Assert + Assert.That(result, Is.False); + } + + [Test] + public async Task Equals_ShouldReturnFalseIfTargetIsGarbageCollected() + { + // Arrange + var weakRefKey1 = 
GetWeakReferenceKey(); + var weakRefKey2 = GetWeakReferenceKey(); + + // Act + Assert.That(weakRefKey1.Value, Is.Not.Null); + Assert.That(weakRefKey2.Value, Is.Not.Null); + // force garbage collection + GC.Collect(); + GC.WaitForPendingFinalizers(); + await Task.Delay(500); + GC.Collect(); // Force another collection + + // Assert + Assert.That(weakRefKey1.Equals(weakRefKey2), Is.False); + } + + private class Foo + { + public string Bar { get; set; } + } + } + +}
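
A minimal usage sketch of how the new caching types introduced in this diff are meant to compose, mirroring the AmazonServiceClientWrapper / AwsSdkPipelineWrapper flow above. LRUCache, LRUHashSet, and WeakReferenceKey (namespace NewRelic.Agent.Extensions.Caching) come from the patch; the console program, the stand-in client objects, and the sample account id value are illustrative only.

// Sketch only: shows the intended composition of the new caching types.
using System;
using NewRelic.Agent.Extensions.Caching;

public static class CachingSketch
{
    public static void Main()
    {
        // Track client instances we've already processed, bounded to 100 entries (LRU eviction).
        var seenClients = new LRUHashSet<WeakReferenceKey<object>>(100);

        // Map a client's Config object to the AWS account id parsed from its credentials.
        var accountIdByConfig = new LRUCache<WeakReferenceKey<object>, string>(100);

        var client = new object();       // stands in for an AmazonServiceClient instance
        var clientConfig = new object(); // stands in for that client's Config object

        var clientKey = new WeakReferenceKey<object>(client);
        if (seenClients.Add(clientKey)) // Add returns false when this instance was already seen
        {
            // First time seeing this client: cache the parsed account id, keyed by its config.
            accountIdByConfig.Put(new WeakReferenceKey<object>(clientConfig), "520056171328");
        }

        // Later, a request handler can look the account id back up using a fresh key that wraps
        // the same config object; WeakReferenceKey equality is reference equality on the target.
        var lookupKey = new WeakReferenceKey<object>(clientConfig);
        if (accountIdByConfig.ContainsKey(lookupKey))
        {
            Console.WriteLine(accountIdByConfig.Get(lookupKey));
        }
    }
}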